/* Extraction provenance (gitweb residue, kept as a comment so it cannot be
   parsed as code): official-gcc.git, file gcc/gimple-fold.c,
   blob 1ab20d11fa76fc1642411e53f339f1ee6ff2181f, commit
   "PR 68393: Handle SUBREG_PROMOTED_VAR_P in expand_direct_optab_fn".  */
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "dbgcnt.h"
50 #include "builtins.h"
51 #include "tree-eh.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
55 #include "omp-low.h"
58 /* Return true when DECL can be referenced from current unit.
59 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
60 We can get declarations that are not possible to reference for various
61 reasons:
63 1) When analyzing C++ virtual tables.
64 C++ virtual tables do have known constructors even
65 when they are keyed to other compilation unit.
66 Those tables can contain pointers to methods and vars
67 in other units. Those methods have both STATIC and EXTERNAL
68 set.
69 2) In WHOPR mode devirtualization might lead to reference
70 to method that was partitioned elsehwere.
71 In this case we have static VAR_DECL or FUNCTION_DECL
72 that has no corresponding callgraph/varpool node
73 declaring the body.
74 3) COMDAT functions referred by external vtables that
75 we devirtualize only during final compilation stage.
76 At this time we already decided that we will not output
77 the function body and thus we can't reference the symbol
78 directly. */
80 static bool
81 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
83 varpool_node *vnode;
84 struct cgraph_node *node;
85 symtab_node *snode;
87 if (DECL_ABSTRACT_P (decl))
88 return false;
90 /* We are concerned only about static/external vars and functions. */
91 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
92 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
93 return true;
95 /* Static objects can be referred only if they was not optimized out yet. */
96 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
98 /* Before we start optimizing unreachable code we can be sure all
99 static objects are defined. */
100 if (symtab->function_flags_ready)
101 return true;
102 snode = symtab_node::get (decl);
103 if (!snode || !snode->definition)
104 return false;
105 node = dyn_cast <cgraph_node *> (snode);
106 return !node || !node->global.inlined_to;
109 /* We will later output the initializer, so we can refer to it.
110 So we are concerned only when DECL comes from initializer of
111 external var or var that has been optimized out. */
112 if (!from_decl
113 || TREE_CODE (from_decl) != VAR_DECL
114 || (!DECL_EXTERNAL (from_decl)
115 && (vnode = varpool_node::get (from_decl)) != NULL
116 && vnode->definition)
117 || (flag_ltrans
118 && (vnode = varpool_node::get (from_decl)) != NULL
119 && vnode->in_other_partition))
120 return true;
121 /* We are folding reference from external vtable. The vtable may reffer
122 to a symbol keyed to other compilation unit. The other compilation
123 unit may be in separate DSO and the symbol may be hidden. */
124 if (DECL_VISIBILITY_SPECIFIED (decl)
125 && DECL_EXTERNAL (decl)
126 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
127 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
128 return false;
129 /* When function is public, we always can introduce new reference.
130 Exception are the COMDAT functions where introducing a direct
131 reference imply need to include function body in the curren tunit. */
132 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
133 return true;
134 /* We have COMDAT. We are going to check if we still have definition
135 or if the definition is going to be output in other partition.
136 Bypass this when gimplifying; all needed functions will be produced.
138 As observed in PR20991 for already optimized out comdat virtual functions
139 it may be tempting to not necessarily give up because the copy will be
140 output elsewhere when corresponding vtable is output.
141 This is however not possible - ABI specify that COMDATs are output in
142 units where they are used and when the other unit was compiled with LTO
143 it is possible that vtable was kept public while the function itself
144 was privatized. */
145 if (!symtab->function_flags_ready)
146 return true;
148 snode = symtab_node::get (decl);
149 if (!snode
150 || ((!snode->definition || DECL_EXTERNAL (decl))
151 && (!snode->in_other_partition
152 || (!snode->forced_by_abi && !snode->force_output))))
153 return false;
154 node = dyn_cast <cgraph_node *> (snode);
155 return !node || !node->global.inlined_to;
158 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
159 acceptable form for is_gimple_min_invariant.
160 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
162 tree
163 canonicalize_constructor_val (tree cval, tree from_decl)
165 tree orig_cval = cval;
166 STRIP_NOPS (cval);
167 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
168 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
170 tree ptr = TREE_OPERAND (cval, 0);
171 if (is_gimple_min_invariant (ptr))
172 cval = build1_loc (EXPR_LOCATION (cval),
173 ADDR_EXPR, TREE_TYPE (ptr),
174 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
175 ptr,
176 fold_convert (ptr_type_node,
177 TREE_OPERAND (cval, 1))));
179 if (TREE_CODE (cval) == ADDR_EXPR)
181 tree base = NULL_TREE;
182 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
184 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
185 if (base)
186 TREE_OPERAND (cval, 0) = base;
188 else
189 base = get_base_address (TREE_OPERAND (cval, 0));
190 if (!base)
191 return NULL_TREE;
193 if ((TREE_CODE (base) == VAR_DECL
194 || TREE_CODE (base) == FUNCTION_DECL)
195 && !can_refer_decl_in_current_unit_p (base, from_decl))
196 return NULL_TREE;
197 if (TREE_CODE (base) == VAR_DECL)
198 TREE_ADDRESSABLE (base) = 1;
199 else if (TREE_CODE (base) == FUNCTION_DECL)
201 /* Make sure we create a cgraph node for functions we'll reference.
202 They can be non-existent if the reference comes from an entry
203 of an external vtable for example. */
204 cgraph_node::get_create (base);
206 /* Fixup types in global initializers. */
207 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
208 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
210 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
211 cval = fold_convert (TREE_TYPE (orig_cval), cval);
212 return cval;
214 if (TREE_OVERFLOW_P (cval))
215 return drop_tree_overflow (cval);
216 return orig_cval;
219 /* If SYM is a constant variable with known value, return the value.
220 NULL_TREE is returned otherwise. */
222 tree
223 get_symbol_constant_value (tree sym)
225 tree val = ctor_for_folding (sym);
226 if (val != error_mark_node)
228 if (val)
230 val = canonicalize_constructor_val (unshare_expr (val), sym);
231 if (val && is_gimple_min_invariant (val))
232 return val;
233 else
234 return NULL_TREE;
236 /* Variables declared 'const' without an initializer
237 have zero as the initializer if they may not be
238 overridden at link or run time. */
239 if (!val
240 && is_gimple_reg_type (TREE_TYPE (sym)))
241 return build_zero_cst (TREE_TYPE (sym));
244 return NULL_TREE;
249 /* Subroutine of fold_stmt. We perform several simplifications of the
250 memory reference tree EXPR and make sure to re-gimplify them properly
251 after propagation of constant addresses. IS_LHS is true if the
252 reference is supposed to be an lvalue. */
254 static tree
255 maybe_fold_reference (tree expr, bool is_lhs)
257 tree result;
259 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
260 || TREE_CODE (expr) == REALPART_EXPR
261 || TREE_CODE (expr) == IMAGPART_EXPR)
262 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
263 return fold_unary_loc (EXPR_LOCATION (expr),
264 TREE_CODE (expr),
265 TREE_TYPE (expr),
266 TREE_OPERAND (expr, 0));
267 else if (TREE_CODE (expr) == BIT_FIELD_REF
268 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
269 return fold_ternary_loc (EXPR_LOCATION (expr),
270 TREE_CODE (expr),
271 TREE_TYPE (expr),
272 TREE_OPERAND (expr, 0),
273 TREE_OPERAND (expr, 1),
274 TREE_OPERAND (expr, 2));
276 if (!is_lhs
277 && (result = fold_const_aggregate_ref (expr))
278 && is_gimple_min_invariant (result))
279 return result;
281 return NULL_TREE;
285 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
286 replacement rhs for the statement or NULL_TREE if no simplification
287 could be made. It is assumed that the operands have been previously
288 folded. */
290 static tree
291 fold_gimple_assign (gimple_stmt_iterator *si)
293 gimple *stmt = gsi_stmt (*si);
294 enum tree_code subcode = gimple_assign_rhs_code (stmt);
295 location_t loc = gimple_location (stmt);
297 tree result = NULL_TREE;
299 switch (get_gimple_rhs_class (subcode))
301 case GIMPLE_SINGLE_RHS:
303 tree rhs = gimple_assign_rhs1 (stmt);
305 if (TREE_CLOBBER_P (rhs))
306 return NULL_TREE;
308 if (REFERENCE_CLASS_P (rhs))
309 return maybe_fold_reference (rhs, false);
311 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
313 tree val = OBJ_TYPE_REF_EXPR (rhs);
314 if (is_gimple_min_invariant (val))
315 return val;
316 else if (flag_devirtualize && virtual_method_call_p (rhs))
318 bool final;
319 vec <cgraph_node *>targets
320 = possible_polymorphic_call_targets (rhs, stmt, &final);
321 if (final && targets.length () <= 1 && dbg_cnt (devirt))
323 if (dump_enabled_p ())
325 location_t loc = gimple_location_safe (stmt);
326 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
327 "resolving virtual function address "
328 "reference to function %s\n",
329 targets.length () == 1
330 ? targets[0]->name ()
331 : "NULL");
333 if (targets.length () == 1)
335 val = fold_convert (TREE_TYPE (val),
336 build_fold_addr_expr_loc
337 (loc, targets[0]->decl));
338 STRIP_USELESS_TYPE_CONVERSION (val);
340 else
341 /* We can not use __builtin_unreachable here because it
342 can not have address taken. */
343 val = build_int_cst (TREE_TYPE (val), 0);
344 return val;
349 else if (TREE_CODE (rhs) == ADDR_EXPR)
351 tree ref = TREE_OPERAND (rhs, 0);
352 tree tem = maybe_fold_reference (ref, true);
353 if (tem
354 && TREE_CODE (tem) == MEM_REF
355 && integer_zerop (TREE_OPERAND (tem, 1)))
356 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
357 else if (tem)
358 result = fold_convert (TREE_TYPE (rhs),
359 build_fold_addr_expr_loc (loc, tem));
360 else if (TREE_CODE (ref) == MEM_REF
361 && integer_zerop (TREE_OPERAND (ref, 1)))
362 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
364 if (result)
366 /* Strip away useless type conversions. Both the
367 NON_LVALUE_EXPR that may have been added by fold, and
368 "useless" type conversions that might now be apparent
369 due to propagation. */
370 STRIP_USELESS_TYPE_CONVERSION (result);
372 if (result != rhs && valid_gimple_rhs_p (result))
373 return result;
377 else if (TREE_CODE (rhs) == CONSTRUCTOR
378 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
380 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
381 unsigned i;
382 tree val;
384 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
385 if (! CONSTANT_CLASS_P (val))
386 return NULL_TREE;
388 return build_vector_from_ctor (TREE_TYPE (rhs),
389 CONSTRUCTOR_ELTS (rhs));
392 else if (DECL_P (rhs))
393 return get_symbol_constant_value (rhs);
395 break;
397 case GIMPLE_UNARY_RHS:
398 break;
400 case GIMPLE_BINARY_RHS:
401 break;
403 case GIMPLE_TERNARY_RHS:
404 result = fold_ternary_loc (loc, subcode,
405 TREE_TYPE (gimple_assign_lhs (stmt)),
406 gimple_assign_rhs1 (stmt),
407 gimple_assign_rhs2 (stmt),
408 gimple_assign_rhs3 (stmt));
410 if (result)
412 STRIP_USELESS_TYPE_CONVERSION (result);
413 if (valid_gimple_rhs_p (result))
414 return result;
416 break;
418 case GIMPLE_INVALID_RHS:
419 gcc_unreachable ();
422 return NULL_TREE;
426 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
427 adjusting the replacement stmts location and virtual operands.
428 If the statement has a lhs the last stmt in the sequence is expected
429 to assign to that lhs. */
431 static void
432 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
434 gimple *stmt = gsi_stmt (*si_p);
436 if (gimple_has_location (stmt))
437 annotate_all_with_location (stmts, gimple_location (stmt));
439 /* First iterate over the replacement statements backward, assigning
440 virtual operands to their defining statements. */
441 gimple *laststore = NULL;
442 for (gimple_stmt_iterator i = gsi_last (stmts);
443 !gsi_end_p (i); gsi_prev (&i))
445 gimple *new_stmt = gsi_stmt (i);
446 if ((gimple_assign_single_p (new_stmt)
447 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
448 || (is_gimple_call (new_stmt)
449 && (gimple_call_flags (new_stmt)
450 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
452 tree vdef;
453 if (!laststore)
454 vdef = gimple_vdef (stmt);
455 else
456 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
457 gimple_set_vdef (new_stmt, vdef);
458 if (vdef && TREE_CODE (vdef) == SSA_NAME)
459 SSA_NAME_DEF_STMT (vdef) = new_stmt;
460 laststore = new_stmt;
464 /* Second iterate over the statements forward, assigning virtual
465 operands to their uses. */
466 tree reaching_vuse = gimple_vuse (stmt);
467 for (gimple_stmt_iterator i = gsi_start (stmts);
468 !gsi_end_p (i); gsi_next (&i))
470 gimple *new_stmt = gsi_stmt (i);
471 /* If the new statement possibly has a VUSE, update it with exact SSA
472 name we know will reach this one. */
473 if (gimple_has_mem_ops (new_stmt))
474 gimple_set_vuse (new_stmt, reaching_vuse);
475 gimple_set_modified (new_stmt, true);
476 if (gimple_vdef (new_stmt))
477 reaching_vuse = gimple_vdef (new_stmt);
480 /* If the new sequence does not do a store release the virtual
481 definition of the original statement. */
482 if (reaching_vuse
483 && reaching_vuse == gimple_vuse (stmt))
485 tree vdef = gimple_vdef (stmt);
486 if (vdef
487 && TREE_CODE (vdef) == SSA_NAME)
489 unlink_stmt_vdef (stmt);
490 release_ssa_name (vdef);
494 /* Finally replace the original statement with the sequence. */
495 gsi_replace_with_seq (si_p, stmts, false);
498 /* Convert EXPR into a GIMPLE value suitable for substitution on the
499 RHS of an assignment. Insert the necessary statements before
500 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
501 is replaced. If the call is expected to produces a result, then it
502 is replaced by an assignment of the new RHS to the result variable.
503 If the result is to be ignored, then the call is replaced by a
504 GIMPLE_NOP. A proper VDEF chain is retained by making the first
505 VUSE and the last VDEF of the whole sequence be the same as the replaced
506 statement and using new SSA names for stores in between. */
508 void
509 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
511 tree lhs;
512 gimple *stmt, *new_stmt;
513 gimple_stmt_iterator i;
514 gimple_seq stmts = NULL;
516 stmt = gsi_stmt (*si_p);
518 gcc_assert (is_gimple_call (stmt));
520 push_gimplify_context (gimple_in_ssa_p (cfun));
522 lhs = gimple_call_lhs (stmt);
523 if (lhs == NULL_TREE)
525 gimplify_and_add (expr, &stmts);
526 /* We can end up with folding a memcpy of an empty class assignment
527 which gets optimized away by C++ gimplification. */
528 if (gimple_seq_empty_p (stmts))
530 pop_gimplify_context (NULL);
531 if (gimple_in_ssa_p (cfun))
533 unlink_stmt_vdef (stmt);
534 release_defs (stmt);
536 gsi_replace (si_p, gimple_build_nop (), false);
537 return;
540 else
542 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
543 new_stmt = gimple_build_assign (lhs, tmp);
544 i = gsi_last (stmts);
545 gsi_insert_after_without_update (&i, new_stmt,
546 GSI_CONTINUE_LINKING);
549 pop_gimplify_context (NULL);
551 gsi_replace_with_seq_vops (si_p, stmts);
555 /* Replace the call at *GSI with the gimple value VAL. */
557 static void
558 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
560 gimple *stmt = gsi_stmt (*gsi);
561 tree lhs = gimple_call_lhs (stmt);
562 gimple *repl;
563 if (lhs)
565 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
566 val = fold_convert (TREE_TYPE (lhs), val);
567 repl = gimple_build_assign (lhs, val);
569 else
570 repl = gimple_build_nop ();
571 tree vdef = gimple_vdef (stmt);
572 if (vdef && TREE_CODE (vdef) == SSA_NAME)
574 unlink_stmt_vdef (stmt);
575 release_ssa_name (vdef);
577 gsi_replace (gsi, repl, false);
580 /* Replace the call at *GSI with the new call REPL and fold that
581 again. */
583 static void
584 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
586 gimple *stmt = gsi_stmt (*gsi);
587 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
588 gimple_set_location (repl, gimple_location (stmt));
589 if (gimple_vdef (stmt)
590 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
592 gimple_set_vdef (repl, gimple_vdef (stmt));
593 gimple_set_vuse (repl, gimple_vuse (stmt));
594 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
596 gsi_replace (gsi, repl, false);
597 fold_stmt (gsi);
600 /* Return true if VAR is a VAR_DECL or a component thereof. */
602 static bool
603 var_decl_component_p (tree var)
605 tree inner = var;
606 while (handled_component_p (inner))
607 inner = TREE_OPERAND (inner, 0);
608 return SSA_VAR_P (inner);
611 /* Fold function call to builtin mem{{,p}cpy,move}. Return
612 false if no simplification can be made.
613 If ENDP is 0, return DEST (like memcpy).
614 If ENDP is 1, return DEST+LEN (like mempcpy).
615 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
616 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
617 (memmove). */
619 static bool
620 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
621 tree dest, tree src, int endp)
623 gimple *stmt = gsi_stmt (*gsi);
624 tree lhs = gimple_call_lhs (stmt);
625 tree len = gimple_call_arg (stmt, 2);
626 tree destvar, srcvar;
627 location_t loc = gimple_location (stmt);
629 /* If the LEN parameter is zero, return DEST. */
630 if (integer_zerop (len))
632 gimple *repl;
633 if (gimple_call_lhs (stmt))
634 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
635 else
636 repl = gimple_build_nop ();
637 tree vdef = gimple_vdef (stmt);
638 if (vdef && TREE_CODE (vdef) == SSA_NAME)
640 unlink_stmt_vdef (stmt);
641 release_ssa_name (vdef);
643 gsi_replace (gsi, repl, false);
644 return true;
647 /* If SRC and DEST are the same (and not volatile), return
648 DEST{,+LEN,+LEN-1}. */
649 if (operand_equal_p (src, dest, 0))
651 unlink_stmt_vdef (stmt);
652 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
653 release_ssa_name (gimple_vdef (stmt));
654 if (!lhs)
656 gsi_replace (gsi, gimple_build_nop (), false);
657 return true;
659 goto done;
661 else
663 tree srctype, desttype;
664 unsigned int src_align, dest_align;
665 tree off0;
667 /* Build accesses at offset zero with a ref-all character type. */
668 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
669 ptr_mode, true), 0);
671 /* If we can perform the copy efficiently with first doing all loads
672 and then all stores inline it that way. Currently efficiently
673 means that we can load all the memory into a single integer
674 register which is what MOVE_MAX gives us. */
675 src_align = get_pointer_alignment (src);
676 dest_align = get_pointer_alignment (dest);
677 if (tree_fits_uhwi_p (len)
678 && compare_tree_int (len, MOVE_MAX) <= 0
679 /* ??? Don't transform copies from strings with known length this
680 confuses the tree-ssa-strlen.c. This doesn't handle
681 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
682 reason. */
683 && !c_strlen (src, 2))
685 unsigned ilen = tree_to_uhwi (len);
686 if (exact_log2 (ilen) != -1)
688 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
689 if (type
690 && TYPE_MODE (type) != BLKmode
691 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
692 == ilen * 8)
693 /* If the destination pointer is not aligned we must be able
694 to emit an unaligned store. */
695 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
696 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
697 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
698 != CODE_FOR_nothing)))
700 tree srctype = type;
701 tree desttype = type;
702 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
703 srctype = build_aligned_type (type, src_align);
704 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
705 tree tem = fold_const_aggregate_ref (srcmem);
706 if (tem)
707 srcmem = tem;
708 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
709 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
710 src_align)
711 && (optab_handler (movmisalign_optab,
712 TYPE_MODE (type))
713 == CODE_FOR_nothing))
714 srcmem = NULL_TREE;
715 if (srcmem)
717 gimple *new_stmt;
718 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
720 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
721 if (gimple_in_ssa_p (cfun))
722 srcmem = make_ssa_name (TREE_TYPE (srcmem),
723 new_stmt);
724 else
725 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
726 gimple_assign_set_lhs (new_stmt, srcmem);
727 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
728 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
730 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
731 desttype = build_aligned_type (type, dest_align);
732 new_stmt
733 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
734 dest, off0),
735 srcmem);
736 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
737 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
738 if (gimple_vdef (new_stmt)
739 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
740 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
741 if (!lhs)
743 gsi_replace (gsi, new_stmt, false);
744 return true;
746 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
747 goto done;
753 if (endp == 3)
755 /* Both DEST and SRC must be pointer types.
756 ??? This is what old code did. Is the testing for pointer types
757 really mandatory?
759 If either SRC is readonly or length is 1, we can use memcpy. */
760 if (!dest_align || !src_align)
761 return false;
762 if (readonly_data_expr (src)
763 || (tree_fits_uhwi_p (len)
764 && (MIN (src_align, dest_align) / BITS_PER_UNIT
765 >= tree_to_uhwi (len))))
767 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
768 if (!fn)
769 return false;
770 gimple_call_set_fndecl (stmt, fn);
771 gimple_call_set_arg (stmt, 0, dest);
772 gimple_call_set_arg (stmt, 1, src);
773 fold_stmt (gsi);
774 return true;
777 /* If *src and *dest can't overlap, optimize into memcpy as well. */
778 if (TREE_CODE (src) == ADDR_EXPR
779 && TREE_CODE (dest) == ADDR_EXPR)
781 tree src_base, dest_base, fn;
782 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
783 HOST_WIDE_INT size = -1;
784 HOST_WIDE_INT maxsize = -1;
785 bool reverse;
787 srcvar = TREE_OPERAND (src, 0);
788 src_base = get_ref_base_and_extent (srcvar, &src_offset,
789 &size, &maxsize, &reverse);
790 destvar = TREE_OPERAND (dest, 0);
791 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
792 &size, &maxsize, &reverse);
793 if (tree_fits_uhwi_p (len))
794 maxsize = tree_to_uhwi (len);
795 else
796 maxsize = -1;
797 src_offset /= BITS_PER_UNIT;
798 dest_offset /= BITS_PER_UNIT;
799 if (SSA_VAR_P (src_base)
800 && SSA_VAR_P (dest_base))
802 if (operand_equal_p (src_base, dest_base, 0)
803 && ranges_overlap_p (src_offset, maxsize,
804 dest_offset, maxsize))
805 return false;
807 else if (TREE_CODE (src_base) == MEM_REF
808 && TREE_CODE (dest_base) == MEM_REF)
810 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
811 TREE_OPERAND (dest_base, 0), 0))
812 return false;
813 offset_int off = mem_ref_offset (src_base) + src_offset;
814 if (!wi::fits_shwi_p (off))
815 return false;
816 src_offset = off.to_shwi ();
818 off = mem_ref_offset (dest_base) + dest_offset;
819 if (!wi::fits_shwi_p (off))
820 return false;
821 dest_offset = off.to_shwi ();
822 if (ranges_overlap_p (src_offset, maxsize,
823 dest_offset, maxsize))
824 return false;
826 else
827 return false;
829 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
830 if (!fn)
831 return false;
832 gimple_call_set_fndecl (stmt, fn);
833 gimple_call_set_arg (stmt, 0, dest);
834 gimple_call_set_arg (stmt, 1, src);
835 fold_stmt (gsi);
836 return true;
839 /* If the destination and source do not alias optimize into
840 memcpy as well. */
841 if ((is_gimple_min_invariant (dest)
842 || TREE_CODE (dest) == SSA_NAME)
843 && (is_gimple_min_invariant (src)
844 || TREE_CODE (src) == SSA_NAME))
846 ao_ref destr, srcr;
847 ao_ref_init_from_ptr_and_size (&destr, dest, len);
848 ao_ref_init_from_ptr_and_size (&srcr, src, len);
849 if (!refs_may_alias_p_1 (&destr, &srcr, false))
851 tree fn;
852 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
853 if (!fn)
854 return false;
855 gimple_call_set_fndecl (stmt, fn);
856 gimple_call_set_arg (stmt, 0, dest);
857 gimple_call_set_arg (stmt, 1, src);
858 fold_stmt (gsi);
859 return true;
863 return false;
866 if (!tree_fits_shwi_p (len))
867 return false;
868 /* FIXME:
869 This logic lose for arguments like (type *)malloc (sizeof (type)),
870 since we strip the casts of up to VOID return value from malloc.
871 Perhaps we ought to inherit type from non-VOID argument here? */
872 STRIP_NOPS (src);
873 STRIP_NOPS (dest);
874 if (!POINTER_TYPE_P (TREE_TYPE (src))
875 || !POINTER_TYPE_P (TREE_TYPE (dest)))
876 return false;
877 /* In the following try to find a type that is most natural to be
878 used for the memcpy source and destination and that allows
879 the most optimization when memcpy is turned into a plain assignment
880 using that type. In theory we could always use a char[len] type
881 but that only gains us that the destination and source possibly
882 no longer will have their address taken. */
883 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
884 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
886 tree tem = TREE_OPERAND (src, 0);
887 STRIP_NOPS (tem);
888 if (tem != TREE_OPERAND (src, 0))
889 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
891 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
893 tree tem = TREE_OPERAND (dest, 0);
894 STRIP_NOPS (tem);
895 if (tem != TREE_OPERAND (dest, 0))
896 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
898 srctype = TREE_TYPE (TREE_TYPE (src));
899 if (TREE_CODE (srctype) == ARRAY_TYPE
900 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
902 srctype = TREE_TYPE (srctype);
903 STRIP_NOPS (src);
904 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
906 desttype = TREE_TYPE (TREE_TYPE (dest));
907 if (TREE_CODE (desttype) == ARRAY_TYPE
908 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
910 desttype = TREE_TYPE (desttype);
911 STRIP_NOPS (dest);
912 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
914 if (TREE_ADDRESSABLE (srctype)
915 || TREE_ADDRESSABLE (desttype))
916 return false;
918 /* Make sure we are not copying using a floating-point mode or
919 a type whose size possibly does not match its precision. */
920 if (FLOAT_MODE_P (TYPE_MODE (desttype))
921 || TREE_CODE (desttype) == BOOLEAN_TYPE
922 || TREE_CODE (desttype) == ENUMERAL_TYPE)
923 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
924 if (FLOAT_MODE_P (TYPE_MODE (srctype))
925 || TREE_CODE (srctype) == BOOLEAN_TYPE
926 || TREE_CODE (srctype) == ENUMERAL_TYPE)
927 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
928 if (!srctype)
929 srctype = desttype;
930 if (!desttype)
931 desttype = srctype;
932 if (!srctype)
933 return false;
935 src_align = get_pointer_alignment (src);
936 dest_align = get_pointer_alignment (dest);
937 if (dest_align < TYPE_ALIGN (desttype)
938 || src_align < TYPE_ALIGN (srctype))
939 return false;
941 destvar = dest;
942 STRIP_NOPS (destvar);
943 if (TREE_CODE (destvar) == ADDR_EXPR
944 && var_decl_component_p (TREE_OPERAND (destvar, 0))
945 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
946 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
947 else
948 destvar = NULL_TREE;
950 srcvar = src;
951 STRIP_NOPS (srcvar);
952 if (TREE_CODE (srcvar) == ADDR_EXPR
953 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
954 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
956 if (!destvar
957 || src_align >= TYPE_ALIGN (desttype))
958 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
959 srcvar, off0);
960 else if (!STRICT_ALIGNMENT)
962 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
963 src_align);
964 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
966 else
967 srcvar = NULL_TREE;
969 else
970 srcvar = NULL_TREE;
972 if (srcvar == NULL_TREE && destvar == NULL_TREE)
973 return false;
975 if (srcvar == NULL_TREE)
977 STRIP_NOPS (src);
978 if (src_align >= TYPE_ALIGN (desttype))
979 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
980 else
982 if (STRICT_ALIGNMENT)
983 return false;
984 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
985 src_align);
986 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
989 else if (destvar == NULL_TREE)
991 STRIP_NOPS (dest);
992 if (dest_align >= TYPE_ALIGN (srctype))
993 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
994 else
996 if (STRICT_ALIGNMENT)
997 return false;
998 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
999 dest_align);
1000 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1004 gimple *new_stmt;
1005 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1007 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1008 if (gimple_in_ssa_p (cfun))
1009 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1010 else
1011 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1012 gimple_assign_set_lhs (new_stmt, srcvar);
1013 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1014 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1016 new_stmt = gimple_build_assign (destvar, srcvar);
1017 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1018 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1019 if (gimple_vdef (new_stmt)
1020 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1021 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1022 if (!lhs)
1024 gsi_replace (gsi, new_stmt, false);
1025 return true;
1027 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1030 done:
1031 gimple_seq stmts = NULL;
1032 if (endp == 0 || endp == 3)
1033 len = NULL_TREE;
1034 else if (endp == 2)
1035 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1036 ssize_int (1));
1037 if (endp == 2 || endp == 1)
1039 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1040 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1041 TREE_TYPE (dest), dest, len);
1044 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1045 gimple *repl = gimple_build_assign (lhs, dest);
1046 gsi_replace (gsi, repl, false);
1047 return true;
1050 /* Fold function call to builtin memset or bzero at *GSI setting the
1051 memory of size LEN to VAL. Return whether a simplification was made. */
1053 static bool
1054 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1056 gimple *stmt = gsi_stmt (*gsi);
1057 tree etype;
1058 unsigned HOST_WIDE_INT length, cval;
1060 /* If the LEN parameter is zero, return DEST. */
1061 if (integer_zerop (len))
1063 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1064 return true;
1067 if (! tree_fits_uhwi_p (len))
1068 return false;
1070 if (TREE_CODE (c) != INTEGER_CST)
1071 return false;
1073 tree dest = gimple_call_arg (stmt, 0);
1074 tree var = dest;
1075 if (TREE_CODE (var) != ADDR_EXPR)
1076 return false;
1078 var = TREE_OPERAND (var, 0);
1079 if (TREE_THIS_VOLATILE (var))
1080 return false;
1082 etype = TREE_TYPE (var);
1083 if (TREE_CODE (etype) == ARRAY_TYPE)
1084 etype = TREE_TYPE (etype);
1086 if (!INTEGRAL_TYPE_P (etype)
1087 && !POINTER_TYPE_P (etype))
1088 return NULL_TREE;
1090 if (! var_decl_component_p (var))
1091 return NULL_TREE;
1093 length = tree_to_uhwi (len);
1094 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1095 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1096 return NULL_TREE;
1098 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1099 return NULL_TREE;
1101 if (integer_zerop (c))
1102 cval = 0;
1103 else
1105 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1106 return NULL_TREE;
1108 cval = TREE_INT_CST_LOW (c);
1109 cval &= 0xff;
1110 cval |= cval << 8;
1111 cval |= cval << 16;
1112 cval |= (cval << 31) << 1;
1115 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1116 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1117 gimple_set_vuse (store, gimple_vuse (stmt));
1118 tree vdef = gimple_vdef (stmt);
1119 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1121 gimple_set_vdef (store, gimple_vdef (stmt));
1122 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1124 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1125 if (gimple_call_lhs (stmt))
1127 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1128 gsi_replace (gsi, asgn, false);
1130 else
1132 gimple_stmt_iterator gsi2 = *gsi;
1133 gsi_prev (gsi);
1134 gsi_remove (&gsi2, true);
1137 return true;
1141 /* Return the string length, maximum string length or maximum value of
1142 ARG in LENGTH.
1143 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1144 is not NULL and, for TYPE == 0, its value is not equal to the length
1145 we determine or if we are unable to determine the length or value,
1146 return false. VISITED is a bitmap of visited variables.
1147 TYPE is 0 if string length should be returned, 1 for maximum string
1148 length and 2 for maximum value ARG can have. */
1150 static bool
1151 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1153 tree var, val;
1154 gimple *def_stmt;
1156 if (TREE_CODE (arg) != SSA_NAME)
1158 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1159 if (TREE_CODE (arg) == ADDR_EXPR
1160 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1161 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1163 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1164 if (TREE_CODE (aop0) == INDIRECT_REF
1165 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1166 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1167 length, visited, type);
/* For TYPE == 2 ARG itself must be a non-negative INTEGER_CST;
   otherwise compute its string length via c_strlen.  */
1170 if (type == 2)
1172 val = arg;
1173 if (TREE_CODE (val) != INTEGER_CST
1174 || tree_int_cst_sgn (val) < 0)
1175 return false;
1177 else
1178 val = c_strlen (arg, 1);
1179 if (!val)
1180 return false;
/* Merge VAL into *LENGTH: for TYPE > 0 keep the larger of the two,
   for TYPE == 0 require both to agree exactly.  */
1182 if (*length)
1184 if (type > 0)
1186 if (TREE_CODE (*length) != INTEGER_CST
1187 || TREE_CODE (val) != INTEGER_CST)
1188 return false;
1190 if (tree_int_cst_lt (*length, val))
1191 *length = val;
1192 return true;
1194 else if (simple_cst_equal (val, *length) != 1)
1195 return false;
1198 *length = val;
1199 return true;
1202 /* If ARG is registered for SSA update we cannot look at its defining
1203 statement. */
1204 if (name_registered_for_update_p (arg))
1205 return false;
1207 /* If we were already here, break the infinite cycle.
   The bitmap is allocated lazily on first use; bitmap_set_bit
   returns false when the bit was already set.  */
1208 if (!*visited)
1209 *visited = BITMAP_ALLOC (NULL);
1210 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1211 return true;
1213 var = arg;
1214 def_stmt = SSA_NAME_DEF_STMT (var);
1216 switch (gimple_code (def_stmt))
1218 case GIMPLE_ASSIGN:
1219 /* The RHS of the statement defining VAR must either have a
1220 constant length or come from another SSA_NAME with a constant
1221 length. */
1222 if (gimple_assign_single_p (def_stmt)
1223 || gimple_assign_unary_nop_p (def_stmt))
1225 tree rhs = gimple_assign_rhs1 (def_stmt);
1226 return get_maxval_strlen (rhs, length, visited, type);
1228 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1230 tree op2 = gimple_assign_rhs2 (def_stmt);
1231 tree op3 = gimple_assign_rhs3 (def_stmt);
/* Both arms of the COND_EXPR must yield a usable length.  */
1232 return get_maxval_strlen (op2, length, visited, type)
1233 && get_maxval_strlen (op3, length, visited, type);
1235 return false;
1237 case GIMPLE_PHI:
1239 /* All the arguments of the PHI node must have the same constant
1240 length. */
1241 unsigned i;
1243 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1245 tree arg = gimple_phi_arg (def_stmt, i)->def;
1247 /* If this PHI has itself as an argument, we cannot
1248 determine the string length of this argument. However,
1249 if we can find a constant string length for the other
1250 PHI args then we can still be sure that this is a
1251 constant string length. So be optimistic and just
1252 continue with the next argument. */
1253 if (arg == gimple_phi_result (def_stmt))
1254 continue;
1256 if (!get_maxval_strlen (arg, length, visited, type))
1257 return false;
1260 return true;
1262 default:
1263 return false;
/* Public wrapper for the recursive worker above: return the string
   length (TYPE == 0), maximum string length (TYPE == 1) or maximum
   value (TYPE == 2) of ARG, or NULL_TREE if it cannot be determined.
   Manages the lifetime of the VISITED bitmap internally.  */
1267 tree
1268 get_maxval_strlen (tree arg, int type)
1270 bitmap visited = NULL;
1271 tree len = NULL_TREE;
1272 if (!get_maxval_strlen (arg, &len, &visited, type))
1273 len = NULL_TREE;
1274 if (visited)
1275 BITMAP_FREE (visited);
1277 return len;
1281 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1282 If the length of SRC is a known constant, replace the call with an
1283 equivalent memcpy. Return false if no simplification can be made. */
1285 static bool
1286 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1287 tree dest, tree src)
1289 location_t loc = gimple_location (gsi_stmt (*gsi));
1290 tree fn;
1292 /* If SRC and DEST are the same (and not volatile), return DEST. */
1293 if (operand_equal_p (src, dest, 0))
1295 replace_call_with_value (gsi, dest);
1296 return true;
1299 if (optimize_function_for_size_p (cfun))
1300 return false;
1302 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1303 if (!fn)
1304 return false;
1306 tree len = get_maxval_strlen (src, 0);
1307 if (!len)
1308 return false;
/* Copy LEN + 1 bytes so the terminating NUL is included.  */
1310 len = fold_convert_loc (loc, size_type_node, len);
1311 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1312 len = force_gimple_operand_gsi (gsi, len, true,
1313 NULL_TREE, true, GSI_SAME_STMT);
1314 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1315 replace_call_with_call_and_fold (gsi, repl);
1316 return true;
1319 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1320 If the source string is known to fit in LEN bytes (including NUL),
1321 transform the call into memcpy. Return false if no simplification
     can be made. */
1323 static bool
1324 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1325 tree dest, tree src, tree len)
1327 location_t loc = gimple_location (gsi_stmt (*gsi));
1328 tree fn;
1330 /* If the LEN parameter is zero, return DEST. */
1331 if (integer_zerop (len))
1333 replace_call_with_value (gsi, dest);
1334 return true;
1337 /* We can't compare slen with len as constants below if len is not a
1338 constant. */
1339 if (TREE_CODE (len) != INTEGER_CST)
1340 return false;
1342 /* Now, we must be passed a constant src ptr parameter. */
1343 tree slen = get_maxval_strlen (src, 0);
1344 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1345 return false;
/* SLEN becomes strlen (SRC) + 1 to account for the NUL terminator.  */
1347 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1349 /* We do not support simplification of this case, though we do
1350 support it when expanding trees into RTL. */
1351 /* FIXME: generate a call to __builtin_memset. */
1352 if (tree_int_cst_lt (slen, len))
1353 return false;
1355 /* OK transform into builtin memcpy. */
1356 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1357 if (!fn)
1358 return false;
1360 len = fold_convert_loc (loc, size_type_node, len);
1361 len = force_gimple_operand_gsi (gsi, len, true,
1362 NULL_TREE, true, GSI_SAME_STMT);
1363 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1364 replace_call_with_call_and_fold (gsi, repl);
1365 return true;
1368 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1369 to the call.
1371 Return NULL_TREE if no simplification was possible, otherwise return the
1372 simplified form of the call as a tree.
1374 The simplified form may be a constant or other expression which
1375 computes the same value, but in a more efficient manner (including
1376 calls to other builtin functions).
1378 The call may contain arguments which need to be evaluated, but
1379 which are not useful to determine the result of the call. In
1380 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1381 COMPOUND_EXPR will be an argument which must be evaluated.
1382 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1383 COMPOUND_EXPR in the chain will contain the tree for the simplified
1384 form of the builtin function call. */
/* NOTE(review): the paragraphs above predate the gimple conversion —
   this function returns bool and replaces the statement in place via
   the iterator; no COMPOUND_EXPR chains are built.  */
1386 static bool
1387 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1389 gimple *stmt = gsi_stmt (*gsi);
1390 location_t loc = gimple_location (stmt);
1392 const char *p = c_getstr (src);
1394 /* If the string length is zero, return the dst parameter. */
1395 if (p && *p == '\0')
1397 replace_call_with_value (gsi, dst);
1398 return true;
1401 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1402 return false;
1404 /* See if we can store by pieces into (dst + strlen(dst)). */
1405 tree newdst;
1406 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1407 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1409 if (!strlen_fn || !memcpy_fn)
1410 return false;
1412 /* If the length of the source string isn't computable don't
1413 split strcat into strlen and memcpy. */
1414 tree len = get_maxval_strlen (src, 0);
1415 if (! len)
1416 return false;
1418 /* Create strlen (dst). */
1419 gimple_seq stmts = NULL, stmts2;
1420 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1421 gimple_set_location (repl, loc);
1422 if (gimple_in_ssa_p (cfun))
1423 newdst = make_ssa_name (size_type_node);
1424 else
1425 newdst = create_tmp_reg (size_type_node);
1426 gimple_call_set_lhs (repl, newdst);
1427 gimple_seq_add_stmt_without_update (&stmts, repl);
1429 /* Create (dst p+ strlen (dst)). */
1430 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1431 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1432 gimple_seq_add_seq_without_update (&stmts, stmts2);
/* Copy LEN + 1 bytes of SRC so the terminating NUL is moved too.  */
1434 len = fold_convert_loc (loc, size_type_node, len);
1435 len = size_binop_loc (loc, PLUS_EXPR, len,
1436 build_int_cst (size_type_node, 1));
1437 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1438 gimple_seq_add_seq_without_update (&stmts, stmts2);
1440 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1441 gimple_seq_add_stmt_without_update (&stmts, repl);
1442 if (gimple_call_lhs (stmt))
1444 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1445 gimple_seq_add_stmt_without_update (&stmts, repl);
1446 gsi_replace_with_seq_vops (gsi, stmts);
1447 /* gsi now points at the assignment to the lhs, get a
1448 stmt iterator to the memcpy call.
1449 ??? We can't use gsi_for_stmt as that doesn't work when the
1450 CFG isn't built yet. */
1451 gimple_stmt_iterator gsi2 = *gsi;
1452 gsi_prev (&gsi2);
1453 fold_stmt (&gsi2);
1455 else
1457 gsi_replace_with_seq_vops (gsi, stmts);
1458 fold_stmt (gsi);
1460 return true;
1463 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1464 are the arguments to the call. */
1466 static bool
1467 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1469 gimple *stmt = gsi_stmt (*gsi);
1470 tree dest = gimple_call_arg (stmt, 0);
1471 tree src = gimple_call_arg (stmt, 1);
1472 tree size = gimple_call_arg (stmt, 2);
1473 tree fn;
1474 const char *p;
1477 p = c_getstr (src);
1478 /* If the SRC parameter is "", return DEST. */
1479 if (p && *p == '\0')
1481 replace_call_with_value (gsi, dest);
1482 return true;
/* Only drop the check when SIZE is the "unknown object size" sentinel
   (all ones); otherwise the checking variant must be kept.  */
1485 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1486 return false;
1488 /* If __builtin_strcat_chk is used, assume strcat is available. */
1489 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1490 if (!fn)
1491 return false;
1493 gimple *repl = gimple_build_call (fn, 2, dest, src);
1494 replace_call_with_call_and_fold (gsi, repl);
1495 return true;
1498 /* Simplify a call to the strncat builtin.
   Return whether the statement at *GSI was changed.  */
1500 static bool
1501 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1503 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1504 tree dst = gimple_call_arg (stmt, 0);
1505 tree src = gimple_call_arg (stmt, 1);
1506 tree len = gimple_call_arg (stmt, 2);
1508 const char *p = c_getstr (src);
1510 /* If the requested length is zero, or the src parameter string
1511 length is zero, return the dst parameter. */
1512 if (integer_zerop (len) || (p && *p == '\0'))
1514 replace_call_with_value (gsi, dst);
1515 return true;
1518 /* If the requested len is greater than or equal to the string
1519 length, call strcat. */
1520 if (TREE_CODE (len) == INTEGER_CST && p
1521 && compare_tree_int (len, strlen (p)) >= 0)
1523 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1525 /* If the replacement _DECL isn't initialized, don't do the
1526 transformation. */
1527 if (!fn)
1528 return false;
1530 gcall *repl = gimple_build_call (fn, 2, dst, src);
1531 replace_call_with_call_and_fold (gsi, repl);
1532 return true;
1535 return false;
1538 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1539 LEN, and SIZE. */
1541 static bool
1542 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1544 gimple *stmt = gsi_stmt (*gsi);
1545 tree dest = gimple_call_arg (stmt, 0);
1546 tree src = gimple_call_arg (stmt, 1);
1547 tree len = gimple_call_arg (stmt, 2);
1548 tree size = gimple_call_arg (stmt, 3);
1549 tree fn;
1550 const char *p;
1552 p = c_getstr (src);
1553 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1554 if ((p && *p == '\0')
1555 || integer_zerop (len))
1557 replace_call_with_value (gsi, dest);
1558 return true;
1561 if (! tree_fits_uhwi_p (size))
1562 return false;
/* SIZE not being all ones means a real object size is known, so only
   a transformation into another _chk variant is possible.  */
1564 if (! integer_all_onesp (size))
1566 tree src_len = c_strlen (src, 1);
1567 if (src_len
1568 && tree_fits_uhwi_p (src_len)
1569 && tree_fits_uhwi_p (len)
1570 && ! tree_int_cst_lt (len, src_len))
1572 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1573 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1574 if (!fn)
1575 return false;
1577 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1578 replace_call_with_call_and_fold (gsi, repl);
1579 return true;
1581 return false;
1584 /* If __builtin_strncat_chk is used, assume strncat is available. */
1585 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1586 if (!fn)
1587 return false;
1589 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1590 replace_call_with_call_and_fold (gsi, repl);
1591 return true;
1594 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1595 to the call. UNLOCKED is true if this is actually a call to
1596 fputs_unlocked. Depending on the known length of ARG0 the call is
1597 deleted, turned into fputc, or turned into fwrite. Return false
1598 if no simplification was possible. */
1601 static bool
1602 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1603 tree arg0, tree arg1,
1604 bool unlocked)
1606 gimple *stmt = gsi_stmt (*gsi);
1608 /* If we're using an unlocked function, assume the other unlocked
1609 functions exist explicitly. */
1610 tree const fn_fputc = (unlocked
1611 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1612 : builtin_decl_implicit (BUILT_IN_FPUTC))
1613 tree const fn_fwrite = (unlocked
1614 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1615 : builtin_decl_implicit (BUILT_IN_FWRITE));
1617 /* If the return value is used, don't do the transformation. */
1618 if (gimple_call_lhs (stmt))
1619 return false;
1621 /* Get the length of the string passed to fputs. If the length
1622 can't be determined, punt. */
1623 tree len = get_maxval_strlen (arg0, 0);
1624 if (!len
1625 || TREE_CODE (len) != INTEGER_CST)
1626 return false;
1628 switch (compare_tree_int (len, 1))
1630 case -1: /* length is 0, delete the call entirely . */
1631 replace_call_with_value (gsi, integer_zero_node);
1632 return true;
1634 case 0: /* length is 1, call fputc. */
1636 const char *p = c_getstr (arg0);
1637 if (p != NULL)
1639 if (!fn_fputc)
1640 return false;
1642 gimple *repl = gimple_build_call (fn_fputc, 2,
1643 build_int_cst
1644 (integer_type_node, p[0]), arg1);
1645 replace_call_with_call_and_fold (gsi, repl);
1646 return true;
1649 /* FALLTHROUGH */
1650 case 1: /* length is greater than 1, call fwrite. */
1652 /* If optimizing for size keep fputs. */
1653 if (optimize_function_for_size_p (cfun))
1654 return false;
1655 /* New argument list transforming fputs(string, stream) to
1656 fwrite(string, 1, len, stream). */
1657 if (!fn_fwrite)
1658 return false;
1660 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
1661 size_one_node, len, arg1);
1662 replace_call_with_call_and_fold (gsi, repl);
1663 return true;
1665 default:
1666 gcc_unreachable ();
1668 return false;
1671 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1672 DEST, SRC, LEN, and SIZE are the arguments to the call.
1673 FCODE is the BUILT_IN_* code of the builtin. Return false if no
1674 simplification was made.
   NOTE(review): the old IGNORE/MAXLEN wording described parameters
   that no longer exist; both are now computed locally below.  */
1677 static bool
1678 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1679 tree dest, tree src, tree len, tree size,
1680 enum built_in_function fcode)
1682 gimple *stmt = gsi_stmt (*gsi);
1683 location_t loc = gimple_location (stmt);
1684 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1685 tree fn;
1687 /* If SRC and DEST are the same (and not volatile), return DEST
1688 (resp. DEST+LEN for __mempcpy_chk). */
1689 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1691 if (fcode != BUILT_IN_MEMPCPY_CHK)
1693 replace_call_with_value (gsi, dest);
1694 return true;
1696 else
1698 gimple_seq stmts = NULL;
1699 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1700 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR, dest, len);
1701 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1702 replace_call_with_value (gsi, temp);
1703 return true;
1707 if (! tree_fits_uhwi_p (size))
1708 return false;
/* MAXLEN is an upper bound on LEN when LEN itself is not constant.  */
1710 tree maxlen = get_maxval_strlen (len, 2);
1711 if (! integer_all_onesp (size))
1713 if (! tree_fits_uhwi_p (len))
1715 /* If LEN is not constant, try MAXLEN too.
1716 For MAXLEN only allow optimizing into non-_ocs function
1717 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1718 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1720 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1722 /* (void) __mempcpy_chk () can be optimized into
1723 (void) __memcpy_chk (). */
1724 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1725 if (!fn)
1726 return false;
1728 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1729 replace_call_with_call_and_fold (gsi, repl);
1730 return true;
1732 return false;
1735 else
1736 maxlen = len;
1738 if (tree_int_cst_lt (size, maxlen))
1739 return false;
1742 fn = NULL_TREE;
1743 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1744 mem{cpy,pcpy,move,set} is available. */
1745 switch (fcode)
1747 case BUILT_IN_MEMCPY_CHK:
1748 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1749 break;
1750 case BUILT_IN_MEMPCPY_CHK:
1751 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1752 break;
1753 case BUILT_IN_MEMMOVE_CHK:
1754 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1755 break;
1756 case BUILT_IN_MEMSET_CHK:
1757 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1758 break;
1759 default:
1760 break;
1763 if (!fn)
1764 return false;
1766 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1767 replace_call_with_call_and_fold (gsi, repl);
1768 return true;
1771 /* Fold a call to the __st[rp]cpy_chk builtin.
1772 DEST, SRC, and SIZE are the arguments to the call.
1773 FCODE is the BUILT_IN_* code of the builtin. Return false if no
1774 simplification was made.
   NOTE(review): IGNORE and MAXLEN are no longer parameters; both are
   computed locally below.  */
1777 static bool
1778 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1779 tree dest,
1780 tree src, tree size,
1781 enum built_in_function fcode)
1783 gimple *stmt = gsi_stmt (*gsi);
1784 location_t loc = gimple_location (stmt);
1785 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1786 tree len, fn;
1788 /* If SRC and DEST are the same (and not volatile), return DEST. */
1789 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1791 replace_call_with_value (gsi, dest);
1792 return true;
1795 if (! tree_fits_uhwi_p (size))
1796 return false;
/* MAXLEN is an upper bound on strlen (SRC).  */
1798 tree maxlen = get_maxval_strlen (src, 1);
1799 if (! integer_all_onesp (size))
1801 len = c_strlen (src, 1);
1802 if (! len || ! tree_fits_uhwi_p (len))
1804 /* If LEN is not constant, try MAXLEN too.
1805 For MAXLEN only allow optimizing into non-_ocs function
1806 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1807 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1809 if (fcode == BUILT_IN_STPCPY_CHK)
1811 if (! ignore)
1812 return false;
1814 /* If return value of __stpcpy_chk is ignored,
1815 optimize into __strcpy_chk. */
1816 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1817 if (!fn)
1818 return false;
1820 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1821 replace_call_with_call_and_fold (gsi, repl);
1822 return true;
1825 if (! len || TREE_SIDE_EFFECTS (len))
1826 return false;
1828 /* If c_strlen returned something, but not a constant,
1829 transform __strcpy_chk into __memcpy_chk. */
1830 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1831 if (!fn)
1832 return false;
1834 gimple_seq stmts = NULL;
1835 len = gimple_convert (&stmts, loc, size_type_node, len);
1836 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
1837 build_int_cst (size_type_node, 1));
1838 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1839 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1840 replace_call_with_call_and_fold (gsi, repl);
1841 return true;
1844 else
1845 maxlen = len;
1847 if (! tree_int_cst_lt (maxlen, size))
1848 return false;
1851 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1852 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1853 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1854 if (!fn)
1855 return false;
1857 gimple *repl = gimple_build_call (fn, 2, dest, src);
1858 replace_call_with_call_and_fold (gsi, repl);
1859 return true;
1862 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1863 are the arguments to the call. FCODE is the BUILT_IN_* code of the
1864 builtin. Return false if no simplification was made.
   NOTE(review): MAXLEN and IGNORE are no longer parameters; both are
1865 computed locally below. */
1867 static bool
1868 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1869 tree dest, tree src,
1870 tree len, tree size,
1871 enum built_in_function fcode)
1873 gimple *stmt = gsi_stmt (*gsi);
1874 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1875 tree fn;
1877 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1879 /* If return value of __stpncpy_chk is ignored,
1880 optimize into __strncpy_chk. */
1881 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1882 if (fn)
1884 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1885 replace_call_with_call_and_fold (gsi, repl);
1886 return true;
1890 if (! tree_fits_uhwi_p (size))
1891 return false;
/* MAXLEN is an upper bound on LEN when LEN itself is not constant.  */
1893 tree maxlen = get_maxval_strlen (len, 2);
1894 if (! integer_all_onesp (size))
1896 if (! tree_fits_uhwi_p (len))
1898 /* If LEN is not constant, try MAXLEN too.
1899 For MAXLEN only allow optimizing into non-_ocs function
1900 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1901 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1902 return false;
1904 else
1905 maxlen = len;
1907 if (tree_int_cst_lt (size, maxlen))
1908 return false;
1911 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1912 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
1913 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
1914 if (!fn)
1915 return false;
1917 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1918 replace_call_with_call_and_fold (gsi, repl);
1919 return true;
1922 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
1923 Return false if no simplification can be made. */
1925 static bool
1926 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
1928 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1929 location_t loc = gimple_location (stmt);
1930 tree dest = gimple_call_arg (stmt, 0);
1931 tree src = gimple_call_arg (stmt, 1);
1932 tree fn, len, lenp1;
1934 /* If the result is unused, replace stpcpy with strcpy. */
1935 if (gimple_call_lhs (stmt) == NULL_TREE)
1937 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
1938 if (!fn)
1939 return false;
1940 gimple_call_set_fndecl (stmt, fn);
1941 fold_stmt (gsi);
1942 return true;
1945 len = c_strlen (src, 1);
1946 if (!len
1947 || TREE_CODE (len) != INTEGER_CST)
1948 return false;
1950 if (optimize_function_for_size_p (cfun)
1951 /* If length is zero it's small enough. */
1952 && !integer_zerop (len))
1953 return false;
1955 /* If the source has a known length replace stpcpy with memcpy. */
1956 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1957 if (!fn)
1958 return false;
/* Emit memcpy (DEST, SRC, LEN + 1) with the call's virtual operands
   transferred to it, then rewrite the call as LHS = DEST p+ LEN.  */
1960 gimple_seq stmts = NULL;
1961 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
1962 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
1963 tem, build_int_cst (size_type_node, 1));
1964 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1965 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
1966 gimple_set_vuse (repl, gimple_vuse (stmt));
1967 gimple_set_vdef (repl, gimple_vdef (stmt));
1968 if (gimple_vdef (repl)
1969 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
1970 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
1971 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
1972 /* Replace the result with dest + len. */
1973 stmts = NULL;
1974 tem = gimple_convert (&stmts, loc, sizetype, len);
1975 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1976 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
1977 POINTER_PLUS_EXPR, dest, tem);
1978 gsi_replace (gsi, ret, false);
1979 /* Finally fold the memcpy call. */
1980 gimple_stmt_iterator gsi2 = *gsi;
1981 gsi_prev (&gsi2);
1982 fold_stmt (&gsi2);
1983 return true;
1986 /* Fold a call at *GSI to __{,v}snprintf_chk. Return
1987 false if a normal call should be emitted rather than expanding
1988 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
1989 BUILT_IN_VSNPRINTF_CHK. The object size and format checks are
1990 dropped when they can be proven unnecessary. */
1992 static bool
1993 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
1994 enum built_in_function fcode)
1996 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1997 tree dest, size, len, fn, fmt, flag;
1998 const char *fmt_str;
2000 /* Verify the required arguments in the original call. */
2001 if (gimple_call_num_args (stmt) < 5)
2002 return false;
2004 dest = gimple_call_arg (stmt, 0);
2005 len = gimple_call_arg (stmt, 1);
2006 flag = gimple_call_arg (stmt, 2);
2007 size = gimple_call_arg (stmt, 3);
2008 fmt = gimple_call_arg (stmt, 4);
2010 if (! tree_fits_uhwi_p (size))
2011 return false;
2013 if (! integer_all_onesp (size))
/* MAXLEN is an upper bound on LEN when LEN itself is not constant.  */
2015 tree maxlen = get_maxval_strlen (len, 2);
2016 if (! tree_fits_uhwi_p (len))
2018 /* If LEN is not constant, try MAXLEN too.
2019 For MAXLEN only allow optimizing into non-_ocs function
2020 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2021 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2022 return false;
2024 else
2025 maxlen = len;
2027 if (tree_int_cst_lt (size, maxlen))
2028 return false;
2031 if (!init_target_chars ())
2032 return false;
2034 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2035 or if format doesn't contain % chars or is "%s". */
2036 if (! integer_zerop (flag))
2038 fmt_str = c_getstr (fmt);
2039 if (fmt_str == NULL)
2040 return false;
2041 if (strchr (fmt_str, target_percent) != NULL
2042 && strcmp (fmt_str, target_percent_s))
2043 return false;
2046 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2047 available. */
2048 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2049 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2050 if (!fn)
2051 return false;
2053 /* Replace the called function and the first 5 argument by 3 retaining
2054 trailing varargs. */
2055 gimple_call_set_fndecl (stmt, fn);
2056 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2057 gimple_call_set_arg (stmt, 0, dest);
2058 gimple_call_set_arg (stmt, 1, len);
2059 gimple_call_set_arg (stmt, 2, fmt);
2060 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2061 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2062 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2063 fold_stmt (gsi);
2064 return true;
2067 /* Fold a call at *GSI to __{,v}sprintf_chk.
2068 Return false if a normal call should be emitted rather than
2069 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2070 or BUILT_IN_VSPRINTF_CHK. */
2072 static bool
2073 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2074 enum built_in_function fcode)
2076 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2077 tree dest, size, len, fn, fmt, flag;
2078 const char *fmt_str;
2079 unsigned nargs = gimple_call_num_args (stmt);
2081 /* Verify the required arguments in the original call. */
2082 if (nargs < 4)
2083 return false;
2084 dest = gimple_call_arg (stmt, 0);
2085 flag = gimple_call_arg (stmt, 1);
2086 size = gimple_call_arg (stmt, 2);
2087 fmt = gimple_call_arg (stmt, 3);
2089 if (! tree_fits_uhwi_p (size))
2090 return false;
2092 len = NULL_TREE;
2094 if (!init_target_chars ())
2095 return false;
2097 /* Check whether the format is a literal string constant. */
2098 fmt_str = c_getstr (fmt);
2099 if (fmt_str != NULL)
2101 /* If the format doesn't contain % args or %%, we know the size. */
2102 if (strchr (fmt_str, target_percent) == 0)
2104 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2105 len = build_int_cstu (size_type_node, strlen (fmt_str));
2107 /* If the format is "%s" and first ... argument is a string literal,
2108 we know the size too. */
2109 else if (fcode == BUILT_IN_SPRINTF_CHK
2110 && strcmp (fmt_str, target_percent_s) == 0)
2112 tree arg;
2114 if (nargs == 5)
2116 arg = gimple_call_arg (stmt, 4);
2117 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2119 len = c_strlen (arg, 1);
2120 if (! len || ! tree_fits_uhwi_p (len))
2121 len = NULL_TREE;
/* With a known object size, only transform when the output is proven
   to fit (LEN < SIZE, leaving room for the terminating NUL).  */
2127 if (! integer_all_onesp (size))
2129 if (! len || ! tree_int_cst_lt (len, size))
2130 return false;
2133 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2134 or if format doesn't contain % chars or is "%s". */
2135 if (! integer_zerop (flag))
2137 if (fmt_str == NULL)
2138 return false;
2139 if (strchr (fmt_str, target_percent) != NULL
2140 && strcmp (fmt_str, target_percent_s))
2141 return false;
2144 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2145 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2146 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2147 if (!fn)
2148 return false;
2150 /* Replace the called function and the first 4 argument by 2 retaining
2151 trailing varargs. */
2152 gimple_call_set_fndecl (stmt, fn);
2153 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2154 gimple_call_set_arg (stmt, 0, dest);
2155 gimple_call_set_arg (stmt, 1, fmt);
2156 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2157 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2158 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2159 fold_stmt (gsi);
2160 return true;
2163 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2164 ORIG may be null if this is a 2-argument call. We don't attempt to
2165 simplify calls with more than 3 arguments.
2167 Return NULL_TREE if no simplification was possible, otherwise return the
2168 simplified form of the call as a tree. If IGNORED is true, it means that
2169 the caller does not use the returned value of the function. */
2171 static bool
2172 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2174 gimple *stmt = gsi_stmt (*gsi);
2175 tree dest = gimple_call_arg (stmt, 0);
2176 tree fmt = gimple_call_arg (stmt, 1);
2177 tree orig = NULL_TREE;
2178 const char *fmt_str = NULL;
2180 /* Verify the required arguments in the original call. We deal with two
2181 types of sprintf() calls: 'sprintf (str, fmt)' and
2182 'sprintf (dest, "%s", orig)'. */
2183 if (gimple_call_num_args (stmt) > 3)
2184 return false;
2186 if (gimple_call_num_args (stmt) == 3)
2187 orig = gimple_call_arg (stmt, 2);
2189 /* Check whether the format is a literal string constant. */
2190 fmt_str = c_getstr (fmt);
2191 if (fmt_str == NULL)
2192 return false;
2194 if (!init_target_chars ())
2195 return false;
2197 /* If the format doesn't contain % args or %%, use strcpy. */
2198 if (strchr (fmt_str, target_percent) == NULL)
2200 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2202 if (!fn)
2203 return false;
2205 /* Don't optimize sprintf (buf, "abc", ptr++). */
2206 if (orig)
2207 return false;
2209 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2210 'format' is known to contain no % formats. */
2211 gimple_seq stmts = NULL;
2212 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2213 gimple_seq_add_stmt_without_update (&stmts, repl);
2214 if (gimple_call_lhs (stmt))
2216 repl = gimple_build_assign (gimple_call_lhs (stmt),
2217 build_int_cst (integer_type_node,
2218 strlen (fmt_str)));
2219 gimple_seq_add_stmt_without_update (&stmts, repl);
2220 gsi_replace_with_seq_vops (gsi, stmts);
2221 /* gsi now points at the assignment to the lhs, get a
2222 stmt iterator to the memcpy call.
2223 ??? We can't use gsi_for_stmt as that doesn't work when the
2224 CFG isn't built yet. */
2225 gimple_stmt_iterator gsi2 = *gsi;
2226 gsi_prev (&gsi2);
2227 fold_stmt (&gsi2);
2229 else
2231 gsi_replace_with_seq_vops (gsi, stmts);
2232 fold_stmt (gsi);
2234 return true;
2237 /* If the format is "%s", use strcpy if the result isn't used. */
2238 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2240 tree fn;
2241 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2243 if (!fn)
2244 return false;
2246 /* Don't crash on sprintf (str1, "%s"). */
2247 if (!orig)
2248 return false;
2250 tree orig_len = NULL_TREE;
2251 if (gimple_call_lhs (stmt))
2253 orig_len = get_maxval_strlen (orig, 0);
2254 if (!orig_len)
2255 return false;
2258 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2259 gimple_seq stmts = NULL;
2260 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2261 gimple_seq_add_stmt_without_update (&stmts, repl);
2262 if (gimple_call_lhs (stmt))
2264 if (!useless_type_conversion_p (integer_type_node,
2265 TREE_TYPE (orig_len)))
2266 orig_len = fold_convert (integer_type_node, orig_len);
2267 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2268 gimple_seq_add_stmt_without_update (&stmts, repl);
2269 gsi_replace_with_seq_vops (gsi, stmts);
2270 /* gsi now points at the assignment to the lhs, get a
2271 stmt iterator to the memcpy call.
2272 ??? We can't use gsi_for_stmt as that doesn't work when the
2273 CFG isn't built yet. */
2274 gimple_stmt_iterator gsi2 = *gsi;
2275 gsi_prev (&gsi2);
2276 fold_stmt (&gsi2);
2278 else
2280 gsi_replace_with_seq_vops (gsi, stmts);
2281 fold_stmt (gsi);
2283 return true;
2285 return false;
2288 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2289 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2290 attempt to simplify calls with more than 4 arguments.
2292 Return NULL_TREE if no simplification was possible, otherwise return the
2293 simplified form of the call as a tree. If IGNORED is true, it means that
2294 the caller does not use the returned value of the function. */
2296 static bool
2297 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2299 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2300 tree dest = gimple_call_arg (stmt, 0);
2301 tree destsize = gimple_call_arg (stmt, 1);
2302 tree fmt = gimple_call_arg (stmt, 2);
2303 tree orig = NULL_TREE;
2304 const char *fmt_str = NULL;
2306 if (gimple_call_num_args (stmt) > 4)
2307 return false;
2309 if (gimple_call_num_args (stmt) == 4)
2310 orig = gimple_call_arg (stmt, 3);
2312 if (!tree_fits_uhwi_p (destsize))
2313 return false;
2314 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2316 /* Check whether the format is a literal string constant. */
2317 fmt_str = c_getstr (fmt);
2318 if (fmt_str == NULL)
2319 return false;
2321 if (!init_target_chars ())
2322 return false;
2324 /* If the format doesn't contain % args or %%, use strcpy. */
2325 if (strchr (fmt_str, target_percent) == NULL)
2327 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2328 if (!fn)
2329 return false;
2331 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2332 if (orig)
2333 return false;
2335 /* We could expand this as
2336 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2337 or to
2338 memcpy (str, fmt_with_nul_at_cstm1, cst);
2339 but in the former case that might increase code size
2340 and in the latter case grow .rodata section too much.
2341 So punt for now. */
2342 size_t len = strlen (fmt_str);
2343 if (len >= destlen)
2344 return false;
2346 gimple_seq stmts = NULL;
2347 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2348 gimple_seq_add_stmt_without_update (&stmts, repl);
2349 if (gimple_call_lhs (stmt))
2351 repl = gimple_build_assign (gimple_call_lhs (stmt),
2352 build_int_cst (integer_type_node, len));
2353 gimple_seq_add_stmt_without_update (&stmts, repl);
2354 gsi_replace_with_seq_vops (gsi, stmts);
2355 /* gsi now points at the assignment to the lhs, get a
2356 stmt iterator to the memcpy call.
2357 ??? We can't use gsi_for_stmt as that doesn't work when the
2358 CFG isn't built yet. */
2359 gimple_stmt_iterator gsi2 = *gsi;
2360 gsi_prev (&gsi2);
2361 fold_stmt (&gsi2);
2363 else
2365 gsi_replace_with_seq_vops (gsi, stmts);
2366 fold_stmt (gsi);
2368 return true;
2371 /* If the format is "%s", use strcpy if the result isn't used. */
2372 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2374 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2375 if (!fn)
2376 return false;
2378 /* Don't crash on snprintf (str1, cst, "%s"). */
2379 if (!orig)
2380 return false;
2382 tree orig_len = get_maxval_strlen (orig, 0);
2383 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2384 return false;
2386 /* We could expand this as
2387 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2388 or to
2389 memcpy (str1, str2_with_nul_at_cstm1, cst);
2390 but in the former case that might increase code size
2391 and in the latter case grow .rodata section too much.
2392 So punt for now. */
2393 if (compare_tree_int (orig_len, destlen) >= 0)
2394 return false;
2396 /* Convert snprintf (str1, cst, "%s", str2) into
2397 strcpy (str1, str2) if strlen (str2) < cst. */
2398 gimple_seq stmts = NULL;
2399 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2400 gimple_seq_add_stmt_without_update (&stmts, repl);
2401 if (gimple_call_lhs (stmt))
2403 if (!useless_type_conversion_p (integer_type_node,
2404 TREE_TYPE (orig_len)))
2405 orig_len = fold_convert (integer_type_node, orig_len);
2406 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2407 gimple_seq_add_stmt_without_update (&stmts, repl);
2408 gsi_replace_with_seq_vops (gsi, stmts);
2409 /* gsi now points at the assignment to the lhs, get a
2410 stmt iterator to the memcpy call.
2411 ??? We can't use gsi_for_stmt as that doesn't work when the
2412 CFG isn't built yet. */
2413 gimple_stmt_iterator gsi2 = *gsi;
2414 gsi_prev (&gsi2);
2415 fold_stmt (&gsi2);
2417 else
2419 gsi_replace_with_seq_vops (gsi, stmts);
2420 fold_stmt (gsi);
2422 return true;
2424 return false;
2427 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2428 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2429 more than 3 arguments, and ARG may be null in the 2-argument case.
2431 Return NULL_TREE if no simplification was possible, otherwise return the
2432 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2433 code of the function to be simplified. */
2435 static bool
2436 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2437 tree fp, tree fmt, tree arg,
2438 enum built_in_function fcode)
2440 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2441 tree fn_fputc, fn_fputs;
2442 const char *fmt_str = NULL;
2444 /* If the return value is used, don't do the transformation. */
2445 if (gimple_call_lhs (stmt) != NULL_TREE)
2446 return false;
2448 /* Check whether the format is a literal string constant. */
2449 fmt_str = c_getstr (fmt);
2450 if (fmt_str == NULL)
2451 return false;
2453 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2455 /* If we're using an unlocked function, assume the other
2456 unlocked functions exist explicitly. */
2457 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2458 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2460 else
2462 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2463 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2466 if (!init_target_chars ())
2467 return false;
2469 /* If the format doesn't contain % args or %%, use strcpy. */
2470 if (strchr (fmt_str, target_percent) == NULL)
2472 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2473 && arg)
2474 return false;
2476 /* If the format specifier was "", fprintf does nothing. */
2477 if (fmt_str[0] == '\0')
2479 replace_call_with_value (gsi, NULL_TREE);
2480 return true;
2483 /* When "string" doesn't contain %, replace all cases of
2484 fprintf (fp, string) with fputs (string, fp). The fputs
2485 builtin will take care of special cases like length == 1. */
2486 if (fn_fputs)
2488 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2489 replace_call_with_call_and_fold (gsi, repl);
2490 return true;
2494 /* The other optimizations can be done only on the non-va_list variants. */
2495 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2496 return false;
2498 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2499 else if (strcmp (fmt_str, target_percent_s) == 0)
2501 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2502 return false;
2503 if (fn_fputs)
2505 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2506 replace_call_with_call_and_fold (gsi, repl);
2507 return true;
2511 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2512 else if (strcmp (fmt_str, target_percent_c) == 0)
2514 if (!arg
2515 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2516 return false;
2517 if (fn_fputc)
2519 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2520 replace_call_with_call_and_fold (gsi, repl);
2521 return true;
2525 return false;
2528 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2529 FMT and ARG are the arguments to the call; we don't fold cases with
2530 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2532 Return NULL_TREE if no simplification was possible, otherwise return the
2533 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2534 code of the function to be simplified. */
2536 static bool
2537 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2538 tree arg, enum built_in_function fcode)
2540 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2541 tree fn_putchar, fn_puts, newarg;
2542 const char *fmt_str = NULL;
2544 /* If the return value is used, don't do the transformation. */
2545 if (gimple_call_lhs (stmt) != NULL_TREE)
2546 return false;
2548 /* Check whether the format is a literal string constant. */
2549 fmt_str = c_getstr (fmt);
2550 if (fmt_str == NULL)
2551 return false;
2553 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2555 /* If we're using an unlocked function, assume the other
2556 unlocked functions exist explicitly. */
2557 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2558 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2560 else
2562 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2563 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2566 if (!init_target_chars ())
2567 return false;
2569 if (strcmp (fmt_str, target_percent_s) == 0
2570 || strchr (fmt_str, target_percent) == NULL)
2572 const char *str;
2574 if (strcmp (fmt_str, target_percent_s) == 0)
2576 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2577 return false;
2579 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2580 return false;
2582 str = c_getstr (arg);
2583 if (str == NULL)
2584 return false;
2586 else
2588 /* The format specifier doesn't contain any '%' characters. */
2589 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2590 && arg)
2591 return false;
2592 str = fmt_str;
2595 /* If the string was "", printf does nothing. */
2596 if (str[0] == '\0')
2598 replace_call_with_value (gsi, NULL_TREE);
2599 return true;
2602 /* If the string has length of 1, call putchar. */
2603 if (str[1] == '\0')
2605 /* Given printf("c"), (where c is any one character,)
2606 convert "c"[0] to an int and pass that to the replacement
2607 function. */
2608 newarg = build_int_cst (integer_type_node, str[0]);
2609 if (fn_putchar)
2611 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2612 replace_call_with_call_and_fold (gsi, repl);
2613 return true;
2616 else
2618 /* If the string was "string\n", call puts("string"). */
2619 size_t len = strlen (str);
2620 if ((unsigned char)str[len - 1] == target_newline
2621 && (size_t) (int) len == len
2622 && (int) len > 0)
2624 char *newstr;
2625 tree offset_node, string_cst;
2627 /* Create a NUL-terminated string that's one char shorter
2628 than the original, stripping off the trailing '\n'. */
2629 newarg = build_string_literal (len, str);
2630 string_cst = string_constant (newarg, &offset_node);
2631 gcc_checking_assert (string_cst
2632 && (TREE_STRING_LENGTH (string_cst)
2633 == (int) len)
2634 && integer_zerop (offset_node)
2635 && (unsigned char)
2636 TREE_STRING_POINTER (string_cst)[len - 1]
2637 == target_newline);
2638 /* build_string_literal creates a new STRING_CST,
2639 modify it in place to avoid double copying. */
2640 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2641 newstr[len - 1] = '\0';
2642 if (fn_puts)
2644 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2645 replace_call_with_call_and_fold (gsi, repl);
2646 return true;
2649 else
2650 /* We'd like to arrange to call fputs(string,stdout) here,
2651 but we need stdout and don't have a way to get it yet. */
2652 return false;
2656 /* The other optimizations can be done only on the non-va_list variants. */
2657 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2658 return false;
2660 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2661 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2663 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2664 return false;
2665 if (fn_puts)
2667 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2668 replace_call_with_call_and_fold (gsi, repl);
2669 return true;
2673 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2674 else if (strcmp (fmt_str, target_percent_c) == 0)
2676 if (!arg || ! useless_type_conversion_p (integer_type_node,
2677 TREE_TYPE (arg)))
2678 return false;
2679 if (fn_putchar)
2681 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2682 replace_call_with_call_and_fold (gsi, repl);
2683 return true;
2687 return false;
2692 /* Fold a call to __builtin_strlen with known length LEN. */
2694 static bool
2695 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2697 gimple *stmt = gsi_stmt (*gsi);
2698 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2699 if (!len)
2700 return false;
2701 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2702 replace_call_with_value (gsi, len);
2703 return true;
2706 /* Fold a call to __builtin_acc_on_device. */
2708 static bool
2709 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2711 /* Defer folding until we know which compiler we're in. */
2712 if (symtab->state != EXPANSION)
2713 return false;
2715 unsigned val_host = GOMP_DEVICE_HOST;
2716 unsigned val_dev = GOMP_DEVICE_NONE;
2718 #ifdef ACCEL_COMPILER
2719 val_host = GOMP_DEVICE_NOT_HOST;
2720 val_dev = ACCEL_COMPILER_acc_device;
2721 #endif
2723 location_t loc = gimple_location (gsi_stmt (*gsi));
2725 tree host_eq = make_ssa_name (boolean_type_node);
2726 gimple *host_ass = gimple_build_assign
2727 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2728 gimple_set_location (host_ass, loc);
2729 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2731 tree dev_eq = make_ssa_name (boolean_type_node);
2732 gimple *dev_ass = gimple_build_assign
2733 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2734 gimple_set_location (dev_ass, loc);
2735 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2737 tree result = make_ssa_name (boolean_type_node);
2738 gimple *result_ass = gimple_build_assign
2739 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2740 gimple_set_location (result_ass, loc);
2741 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2743 replace_call_with_value (gsi, result);
2745 return true;
2748 /* Fold the non-target builtin at *GSI and return whether any simplification
2749 was made. */
2751 static bool
2752 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2754 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2755 tree callee = gimple_call_fndecl (stmt);
2757 /* Give up for always_inline inline builtins until they are
2758 inlined. */
2759 if (avoid_folding_inline_builtin (callee))
2760 return false;
2762 unsigned n = gimple_call_num_args (stmt);
2763 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2764 switch (fcode)
2766 case BUILT_IN_BZERO:
2767 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2768 gimple_call_arg (stmt, 1));
2769 case BUILT_IN_MEMSET:
2770 return gimple_fold_builtin_memset (gsi,
2771 gimple_call_arg (stmt, 1),
2772 gimple_call_arg (stmt, 2));
2773 case BUILT_IN_BCOPY:
2774 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2775 gimple_call_arg (stmt, 0), 3);
2776 case BUILT_IN_MEMCPY:
2777 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2778 gimple_call_arg (stmt, 1), 0);
2779 case BUILT_IN_MEMPCPY:
2780 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2781 gimple_call_arg (stmt, 1), 1);
2782 case BUILT_IN_MEMMOVE:
2783 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2784 gimple_call_arg (stmt, 1), 3);
2785 case BUILT_IN_SPRINTF_CHK:
2786 case BUILT_IN_VSPRINTF_CHK:
2787 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2788 case BUILT_IN_STRCAT_CHK:
2789 return gimple_fold_builtin_strcat_chk (gsi);
2790 case BUILT_IN_STRNCAT_CHK:
2791 return gimple_fold_builtin_strncat_chk (gsi);
2792 case BUILT_IN_STRLEN:
2793 return gimple_fold_builtin_strlen (gsi);
2794 case BUILT_IN_STRCPY:
2795 return gimple_fold_builtin_strcpy (gsi,
2796 gimple_call_arg (stmt, 0),
2797 gimple_call_arg (stmt, 1));
2798 case BUILT_IN_STRNCPY:
2799 return gimple_fold_builtin_strncpy (gsi,
2800 gimple_call_arg (stmt, 0),
2801 gimple_call_arg (stmt, 1),
2802 gimple_call_arg (stmt, 2));
2803 case BUILT_IN_STRCAT:
2804 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2805 gimple_call_arg (stmt, 1));
2806 case BUILT_IN_STRNCAT:
2807 return gimple_fold_builtin_strncat (gsi);
2808 case BUILT_IN_FPUTS:
2809 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2810 gimple_call_arg (stmt, 1), false);
2811 case BUILT_IN_FPUTS_UNLOCKED:
2812 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2813 gimple_call_arg (stmt, 1), true);
2814 case BUILT_IN_MEMCPY_CHK:
2815 case BUILT_IN_MEMPCPY_CHK:
2816 case BUILT_IN_MEMMOVE_CHK:
2817 case BUILT_IN_MEMSET_CHK:
2818 return gimple_fold_builtin_memory_chk (gsi,
2819 gimple_call_arg (stmt, 0),
2820 gimple_call_arg (stmt, 1),
2821 gimple_call_arg (stmt, 2),
2822 gimple_call_arg (stmt, 3),
2823 fcode);
2824 case BUILT_IN_STPCPY:
2825 return gimple_fold_builtin_stpcpy (gsi);
2826 case BUILT_IN_STRCPY_CHK:
2827 case BUILT_IN_STPCPY_CHK:
2828 return gimple_fold_builtin_stxcpy_chk (gsi,
2829 gimple_call_arg (stmt, 0),
2830 gimple_call_arg (stmt, 1),
2831 gimple_call_arg (stmt, 2),
2832 fcode);
2833 case BUILT_IN_STRNCPY_CHK:
2834 case BUILT_IN_STPNCPY_CHK:
2835 return gimple_fold_builtin_stxncpy_chk (gsi,
2836 gimple_call_arg (stmt, 0),
2837 gimple_call_arg (stmt, 1),
2838 gimple_call_arg (stmt, 2),
2839 gimple_call_arg (stmt, 3),
2840 fcode);
2841 case BUILT_IN_SNPRINTF_CHK:
2842 case BUILT_IN_VSNPRINTF_CHK:
2843 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2844 case BUILT_IN_SNPRINTF:
2845 return gimple_fold_builtin_snprintf (gsi);
2846 case BUILT_IN_SPRINTF:
2847 return gimple_fold_builtin_sprintf (gsi);
2848 case BUILT_IN_FPRINTF:
2849 case BUILT_IN_FPRINTF_UNLOCKED:
2850 case BUILT_IN_VFPRINTF:
2851 if (n == 2 || n == 3)
2852 return gimple_fold_builtin_fprintf (gsi,
2853 gimple_call_arg (stmt, 0),
2854 gimple_call_arg (stmt, 1),
2855 n == 3
2856 ? gimple_call_arg (stmt, 2)
2857 : NULL_TREE,
2858 fcode);
2859 break;
2860 case BUILT_IN_FPRINTF_CHK:
2861 case BUILT_IN_VFPRINTF_CHK:
2862 if (n == 3 || n == 4)
2863 return gimple_fold_builtin_fprintf (gsi,
2864 gimple_call_arg (stmt, 0),
2865 gimple_call_arg (stmt, 2),
2866 n == 4
2867 ? gimple_call_arg (stmt, 3)
2868 : NULL_TREE,
2869 fcode);
2870 break;
2871 case BUILT_IN_PRINTF:
2872 case BUILT_IN_PRINTF_UNLOCKED:
2873 case BUILT_IN_VPRINTF:
2874 if (n == 1 || n == 2)
2875 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2876 n == 2
2877 ? gimple_call_arg (stmt, 1)
2878 : NULL_TREE, fcode);
2879 break;
2880 case BUILT_IN_PRINTF_CHK:
2881 case BUILT_IN_VPRINTF_CHK:
2882 if (n == 2 || n == 3)
2883 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2884 n == 3
2885 ? gimple_call_arg (stmt, 2)
2886 : NULL_TREE, fcode);
2887 break;
2888 case BUILT_IN_ACC_ON_DEVICE:
2889 return gimple_fold_builtin_acc_on_device (gsi,
2890 gimple_call_arg (stmt, 0));
2891 default:;
2894 /* Try the generic builtin folder. */
2895 bool ignore = (gimple_call_lhs (stmt) == NULL);
2896 tree result = fold_call_stmt (stmt, ignore);
2897 if (result)
2899 if (ignore)
2900 STRIP_NOPS (result);
2901 else
2902 result = fold_convert (gimple_call_return_type (stmt), result);
2903 if (!update_call_from_tree (gsi, result))
2904 gimplify_and_update_call_from_tree (gsi, result);
2905 return true;
2908 return false;
2911 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
2912 function calls to constants, where possible. */
2914 static tree
2915 fold_internal_goacc_dim (const gimple *call)
2917 int axis = get_oacc_ifn_dim_arg (call);
2918 int size = get_oacc_fn_dim_size (current_function_decl, axis);
2919 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
2920 tree result = NULL_TREE;
2922 /* If the size is 1, or we only want the size and it is not dynamic,
2923 we know the answer. */
2924 if (size == 1 || (!is_pos && size))
2926 tree type = TREE_TYPE (gimple_call_lhs (call));
2927 result = build_int_cst (type, size - is_pos);
2930 return result;
2933 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2934 doesn't fit into TYPE. The test for overflow should be regardless of
2935 -fwrapv, and even for unsigned types. */
2937 bool
2938 arith_overflowed_p (enum tree_code code, const_tree type,
2939 const_tree arg0, const_tree arg1)
2941 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2942 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2943 widest2_int_cst;
2944 widest2_int warg0 = widest2_int_cst (arg0);
2945 widest2_int warg1 = widest2_int_cst (arg1);
2946 widest2_int wres;
2947 switch (code)
2949 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2950 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2951 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2952 default: gcc_unreachable ();
2954 signop sign = TYPE_SIGN (type);
2955 if (sign == UNSIGNED && wi::neg_p (wres))
2956 return true;
2957 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2960 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2961 The statement may be replaced by another statement, e.g., if the call
2962 simplifies to a constant value. Return true if any changes were made.
2963 It is assumed that the operands have been previously folded. */
2965 static bool
2966 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2968 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2969 tree callee;
2970 bool changed = false;
2971 unsigned i;
2973 /* Fold *& in call arguments. */
2974 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2975 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2977 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2978 if (tmp)
2980 gimple_call_set_arg (stmt, i, tmp);
2981 changed = true;
2985 /* Check for virtual calls that became direct calls. */
2986 callee = gimple_call_fn (stmt);
2987 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
2989 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
2991 if (dump_file && virtual_method_call_p (callee)
2992 && !possible_polymorphic_call_target_p
2993 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
2994 (OBJ_TYPE_REF_EXPR (callee)))))
2996 fprintf (dump_file,
2997 "Type inheritance inconsistent devirtualization of ");
2998 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2999 fprintf (dump_file, " to ");
3000 print_generic_expr (dump_file, callee, TDF_SLIM);
3001 fprintf (dump_file, "\n");
3004 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3005 changed = true;
3007 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3009 bool final;
3010 vec <cgraph_node *>targets
3011 = possible_polymorphic_call_targets (callee, stmt, &final);
3012 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3014 tree lhs = gimple_call_lhs (stmt);
3015 if (dump_enabled_p ())
3017 location_t loc = gimple_location_safe (stmt);
3018 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3019 "folding virtual function call to %s\n",
3020 targets.length () == 1
3021 ? targets[0]->name ()
3022 : "__builtin_unreachable");
3024 if (targets.length () == 1)
3026 gimple_call_set_fndecl (stmt, targets[0]->decl);
3027 changed = true;
3028 /* If the call becomes noreturn, remove the lhs. */
3029 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3031 if (TREE_CODE (lhs) == SSA_NAME)
3033 tree var = create_tmp_var (TREE_TYPE (lhs));
3034 tree def = get_or_create_ssa_default_def (cfun, var);
3035 gimple *new_stmt = gimple_build_assign (lhs, def);
3036 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3038 gimple_call_set_lhs (stmt, NULL_TREE);
3040 maybe_remove_unused_call_args (cfun, stmt);
3042 else
3044 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3045 gimple *new_stmt = gimple_build_call (fndecl, 0);
3046 gimple_set_location (new_stmt, gimple_location (stmt));
3047 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3049 tree var = create_tmp_var (TREE_TYPE (lhs));
3050 tree def = get_or_create_ssa_default_def (cfun, var);
3052 /* To satisfy condition for
3053 cgraph_update_edges_for_call_stmt_node,
3054 we need to preserve GIMPLE_CALL statement
3055 at position of GSI iterator. */
3056 update_call_from_tree (gsi, def);
3057 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3059 else
3061 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3062 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3063 gsi_replace (gsi, new_stmt, false);
3065 return true;
3071 /* Check for indirect calls that became direct calls, and then
3072 no longer require a static chain. */
3073 if (gimple_call_chain (stmt))
3075 tree fn = gimple_call_fndecl (stmt);
3076 if (fn && !DECL_STATIC_CHAIN (fn))
3078 gimple_call_set_chain (stmt, NULL);
3079 changed = true;
3081 else
3083 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3084 if (tmp)
3086 gimple_call_set_chain (stmt, tmp);
3087 changed = true;
3092 if (inplace)
3093 return changed;
3095 /* Check for builtins that CCP can handle using information not
3096 available in the generic fold routines. */
3097 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3099 if (gimple_fold_builtin (gsi))
3100 changed = true;
3102 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3104 changed |= targetm.gimple_fold_builtin (gsi);
3106 else if (gimple_call_internal_p (stmt))
3108 enum tree_code subcode = ERROR_MARK;
3109 tree result = NULL_TREE;
3110 bool cplx_result = false;
3111 tree overflow = NULL_TREE;
3112 switch (gimple_call_internal_fn (stmt))
3114 case IFN_BUILTIN_EXPECT:
3115 result = fold_builtin_expect (gimple_location (stmt),
3116 gimple_call_arg (stmt, 0),
3117 gimple_call_arg (stmt, 1),
3118 gimple_call_arg (stmt, 2));
3119 break;
3120 case IFN_UBSAN_OBJECT_SIZE:
3121 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3122 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3123 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3124 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3125 gimple_call_arg (stmt, 2))))
3127 gsi_replace (gsi, gimple_build_nop (), false);
3128 unlink_stmt_vdef (stmt);
3129 release_defs (stmt);
3130 return true;
3132 break;
3133 case IFN_GOACC_DIM_SIZE:
3134 case IFN_GOACC_DIM_POS:
3135 result = fold_internal_goacc_dim (stmt);
3136 break;
3137 case IFN_UBSAN_CHECK_ADD:
3138 subcode = PLUS_EXPR;
3139 break;
3140 case IFN_UBSAN_CHECK_SUB:
3141 subcode = MINUS_EXPR;
3142 break;
3143 case IFN_UBSAN_CHECK_MUL:
3144 subcode = MULT_EXPR;
3145 break;
3146 case IFN_ADD_OVERFLOW:
3147 subcode = PLUS_EXPR;
3148 cplx_result = true;
3149 break;
3150 case IFN_SUB_OVERFLOW:
3151 subcode = MINUS_EXPR;
3152 cplx_result = true;
3153 break;
3154 case IFN_MUL_OVERFLOW:
3155 subcode = MULT_EXPR;
3156 cplx_result = true;
3157 break;
3158 default:
3159 break;
3161 if (subcode != ERROR_MARK)
3163 tree arg0 = gimple_call_arg (stmt, 0);
3164 tree arg1 = gimple_call_arg (stmt, 1);
3165 tree type = TREE_TYPE (arg0);
3166 if (cplx_result)
3168 tree lhs = gimple_call_lhs (stmt);
3169 if (lhs == NULL_TREE)
3170 type = NULL_TREE;
3171 else
3172 type = TREE_TYPE (TREE_TYPE (lhs));
3174 if (type == NULL_TREE)
3176 /* x = y + 0; x = y - 0; x = y * 0; */
3177 else if (integer_zerop (arg1))
3178 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3179 /* x = 0 + y; x = 0 * y; */
3180 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3181 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3182 /* x = y - y; */
3183 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3184 result = integer_zero_node;
3185 /* x = y * 1; x = 1 * y; */
3186 else if (subcode == MULT_EXPR && integer_onep (arg1))
3187 result = arg0;
3188 else if (subcode == MULT_EXPR && integer_onep (arg0))
3189 result = arg1;
3190 else if (TREE_CODE (arg0) == INTEGER_CST
3191 && TREE_CODE (arg1) == INTEGER_CST)
3193 if (cplx_result)
3194 result = int_const_binop (subcode, fold_convert (type, arg0),
3195 fold_convert (type, arg1));
3196 else
3197 result = int_const_binop (subcode, arg0, arg1);
3198 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3200 if (cplx_result)
3201 overflow = build_one_cst (type);
3202 else
3203 result = NULL_TREE;
3206 if (result)
3208 if (result == integer_zero_node)
3209 result = build_zero_cst (type);
3210 else if (cplx_result && TREE_TYPE (result) != type)
3212 if (TREE_CODE (result) == INTEGER_CST)
3214 if (arith_overflowed_p (PLUS_EXPR, type, result,
3215 integer_zero_node))
3216 overflow = build_one_cst (type);
3218 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3219 && TYPE_UNSIGNED (type))
3220 || (TYPE_PRECISION (type)
3221 < (TYPE_PRECISION (TREE_TYPE (result))
3222 + (TYPE_UNSIGNED (TREE_TYPE (result))
3223 && !TYPE_UNSIGNED (type)))))
3224 result = NULL_TREE;
3225 if (result)
3226 result = fold_convert (type, result);
3231 if (result)
3233 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3234 result = drop_tree_overflow (result);
3235 if (cplx_result)
3237 if (overflow == NULL_TREE)
3238 overflow = build_zero_cst (TREE_TYPE (result));
3239 tree ctype = build_complex_type (TREE_TYPE (result));
3240 if (TREE_CODE (result) == INTEGER_CST
3241 && TREE_CODE (overflow) == INTEGER_CST)
3242 result = build_complex (ctype, result, overflow);
3243 else
3244 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3245 ctype, result, overflow);
3247 if (!update_call_from_tree (gsi, result))
3248 gimplify_and_update_call_from_tree (gsi, result);
3249 changed = true;
3253 return changed;
3257 /* Return true whether NAME has a use on STMT. */
3259 static bool
3260 has_use_on_stmt (tree name, gimple *stmt)
3262 imm_use_iterator iter;
3263 use_operand_p use_p;
3264 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3265 if (USE_STMT (use_p) == stmt)
3266 return true;
3267 return false;
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt)))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* For GIMPLE_COND the simplification result must be a tree code
	 (a comparison, an SSA name, or a constant).  */
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	/* Turn a bare SSA name into NAME != 0.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* A constant condition folds the COND to always-false/true.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the result as a separate statement and compare
	     it against zero.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* Only rewrite the RHS in place if it does not grow the number
	 of operands when INPLACE.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  &ops[0], ops[1], ops[2]);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == rcode)
    {
      /* Same function, possibly simplified arguments: update them
	 in place.  */
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  /* Build the replacement into *SEQ, keeping the original LHS,
	     then replace the whole statement preserving virtual operands.  */
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the tree to canonicalize; it is modified in place.
   Returns true if *T was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Strip an outer ADDR_EXPR and any handled components to get at the
     base memory reference.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  /* Fold the component reference into base + constant offset and
	     fold the offset into the MEM_REF's offset operand.  */
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE is the callback used to look through
   SSA name definitions during pattern matching.  Returns true if the
   statement at *GSI was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2, false))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping the
		     comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize memory references in call arguments and LHS.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize memory references in asm outputs and inputs.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs, false))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      /* When folding in place pass NULL for the sequence so no new
	 statements are generated.  */
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* The statement may have been replaced above; refresh STMT.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* When folding in place only accept a RHS that does not need
	   more operand slots than the current statement provides.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold &ref by folding REF and rebuilding the address.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
3810 /* Valueization callback that ends up following single-use SSA edges only. */
3812 tree
3813 follow_single_use_edges (tree val)
3815 if (TREE_CODE (val) == SSA_NAME
3816 && !has_single_use (val))
3817 return NULL_TREE;
3818 return val;
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Do not follow SSA edges when no valueization callback is given.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
/* Like the fold_stmt overload above, but use VALUEIZE to look through
   SSA name definitions during folding.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  /* In-place folding must keep the very same statement object.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      /* Inverted constants flip; an SSA name becomes NAME == 0 and a
	 comparison gets its code inverted.  */
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      /* Already boolean-typed expressions are returned unchanged.  */
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	/* Canonicalize a non-boolean SSA name to NAME != 0.  */
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	/* Rebuild the comparison with boolean result type.  */
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* EXPR is equivalent to (EXPR != 0) and to (EXPR == nonzero).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == nonzero): compare against the
	     comparison defining NAME directly.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != nonzero): compare against the
	     inverted comparison.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
3962 /* Check to see if two boolean expressions OP1 and OP2 are logically
3963 equivalent. */
3965 static bool
3966 same_bool_result_p (const_tree op1, const_tree op2)
3968 /* Simple cases first. */
3969 if (operand_equal_p (op1, op2, 0))
3970 return true;
3972 /* Check the cases where at least one of the operands is a comparison.
3973 These are a bit smarter than operand_equal_p in that they apply some
3974 identifies on SSA_NAMEs. */
3975 if (COMPARISON_CLASS_P (op2)
3976 && same_bool_comparison_p (op1, TREE_CODE (op2),
3977 TREE_OPERAND (op2, 0),
3978 TREE_OPERAND (op2, 1)))
3979 return true;
3980 if (COMPARISON_CLASS_P (op1)
3981 && same_bool_comparison_p (op2, TREE_CODE (op1),
3982 TREE_OPERAND (op1, 0),
3983 TREE_OPERAND (op1, 1)))
3984 return true;
3986 /* Default case. */
3987 return false;
3990 /* Forward declarations for some mutually recursive functions. */
3992 static tree
3993 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3994 enum tree_code code2, tree op2a, tree op2b);
3995 static tree
3996 and_var_with_comparison (tree var, bool invert,
3997 enum tree_code code2, tree op2a, tree op2b);
3998 static tree
3999 and_var_with_comparison_1 (gimple *stmt,
4000 enum tree_code code2, tree op2a, tree op2b);
4001 static tree
4002 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4003 enum tree_code code2, tree op2a, tree op2b);
4004 static tree
4005 or_var_with_comparison (tree var, bool invert,
4006 enum tree_code code2, tree op2a, tree op2b);
4007 static tree
4008 or_var_with_comparison_1 (gimple *stmt,
4009 enum tree_code code2, tree op2a, tree op2b);
4011 /* Helper function for and_comparisons_1: try to simplify the AND of the
4012 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4013 If INVERT is true, invert the value of the VAR before doing the AND.
4014 Return NULL_EXPR if we can't simplify this to a single expression. */
4016 static tree
4017 and_var_with_comparison (tree var, bool invert,
4018 enum tree_code code2, tree op2a, tree op2b)
4020 tree t;
4021 gimple *stmt = SSA_NAME_DEF_STMT (var);
4023 /* We can only deal with variables whose definitions are assignments. */
4024 if (!is_gimple_assign (stmt))
4025 return NULL_TREE;
4027 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4028 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4029 Then we only have to consider the simpler non-inverted cases. */
4030 if (invert)
4031 t = or_var_with_comparison_1 (stmt,
4032 invert_tree_comparison (code2, false),
4033 op2a, op2b);
4034 else
4035 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4036 return canonicalize_bool (t, invert);
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison tests OP2A for truth.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison tests OP2A for falsehood.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4202 /* Try to simplify the AND of two comparisons defined by
4203 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4204 If this can be done without constructing an intermediate value,
4205 return the resulting tree; otherwise NULL_TREE is returned.
4206 This function is deliberately asymmetric as it recurses on SSA_DEFs
4207 in the first comparison but not the second. */
4209 static tree
4210 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4211 enum tree_code code2, tree op2a, tree op2b)
4213 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4215 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4216 if (operand_equal_p (op1a, op2a, 0)
4217 && operand_equal_p (op1b, op2b, 0))
4219 /* Result will be either NULL_TREE, or a combined comparison. */
4220 tree t = combine_comparisons (UNKNOWN_LOCATION,
4221 TRUTH_ANDIF_EXPR, code1, code2,
4222 truth_type, op1a, op1b);
4223 if (t)
4224 return t;
4227 /* Likewise the swapped case of the above. */
4228 if (operand_equal_p (op1a, op2b, 0)
4229 && operand_equal_p (op1b, op2a, 0))
4231 /* Result will be either NULL_TREE, or a combined comparison. */
4232 tree t = combine_comparisons (UNKNOWN_LOCATION,
4233 TRUTH_ANDIF_EXPR, code1,
4234 swap_tree_comparison (code2),
4235 truth_type, op1a, op1b);
4236 if (t)
4237 return t;
4240 /* If both comparisons are of the same value against constants, we might
4241 be able to merge them. */
4242 if (operand_equal_p (op1a, op2a, 0)
4243 && TREE_CODE (op1b) == INTEGER_CST
4244 && TREE_CODE (op2b) == INTEGER_CST)
4246 int cmp = tree_int_cst_compare (op1b, op2b);
4248 /* If we have (op1a == op1b), we should either be able to
4249 return that or FALSE, depending on whether the constant op1b
4250 also satisfies the other comparison against op2b. */
4251 if (code1 == EQ_EXPR)
4253 bool done = true;
4254 bool val;
4255 switch (code2)
4257 case EQ_EXPR: val = (cmp == 0); break;
4258 case NE_EXPR: val = (cmp != 0); break;
4259 case LT_EXPR: val = (cmp < 0); break;
4260 case GT_EXPR: val = (cmp > 0); break;
4261 case LE_EXPR: val = (cmp <= 0); break;
4262 case GE_EXPR: val = (cmp >= 0); break;
4263 default: done = false;
4265 if (done)
4267 if (val)
4268 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4269 else
4270 return boolean_false_node;
4273 /* Likewise if the second comparison is an == comparison. */
4274 else if (code2 == EQ_EXPR)
4276 bool done = true;
4277 bool val;
4278 switch (code1)
4280 case EQ_EXPR: val = (cmp == 0); break;
4281 case NE_EXPR: val = (cmp != 0); break;
4282 case LT_EXPR: val = (cmp > 0); break;
4283 case GT_EXPR: val = (cmp < 0); break;
4284 case LE_EXPR: val = (cmp >= 0); break;
4285 case GE_EXPR: val = (cmp <= 0); break;
4286 default: done = false;
4288 if (done)
4290 if (val)
4291 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4292 else
4293 return boolean_false_node;
4297 /* Same business with inequality tests. */
4298 else if (code1 == NE_EXPR)
4300 bool val;
4301 switch (code2)
4303 case EQ_EXPR: val = (cmp != 0); break;
4304 case NE_EXPR: val = (cmp == 0); break;
4305 case LT_EXPR: val = (cmp >= 0); break;
4306 case GT_EXPR: val = (cmp <= 0); break;
4307 case LE_EXPR: val = (cmp > 0); break;
4308 case GE_EXPR: val = (cmp < 0); break;
4309 default:
4310 val = false;
4312 if (val)
4313 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4315 else if (code2 == NE_EXPR)
4317 bool val;
4318 switch (code1)
4320 case EQ_EXPR: val = (cmp == 0); break;
4321 case NE_EXPR: val = (cmp != 0); break;
4322 case LT_EXPR: val = (cmp <= 0); break;
4323 case GT_EXPR: val = (cmp >= 0); break;
4324 case LE_EXPR: val = (cmp < 0); break;
4325 case GE_EXPR: val = (cmp > 0); break;
4326 default:
4327 val = false;
4329 if (val)
4330 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4333 /* Chose the more restrictive of two < or <= comparisons. */
4334 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4335 && (code2 == LT_EXPR || code2 == LE_EXPR))
4337 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4338 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4339 else
4340 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4343 /* Likewise chose the more restrictive of two > or >= comparisons. */
4344 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4345 && (code2 == GT_EXPR || code2 == GE_EXPR))
4347 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4348 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4349 else
4350 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4353 /* Check for singleton ranges. */
4354 else if (cmp == 0
4355 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4356 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4357 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4359 /* Check for disjoint ranges. */
4360 else if (cmp <= 0
4361 && (code1 == LT_EXPR || code1 == LE_EXPR)
4362 && (code2 == GT_EXPR || code2 == GE_EXPR))
4363 return boolean_false_node;
4364 else if (cmp >= 0
4365 && (code1 == GT_EXPR || code1 == GE_EXPR)
4366 && (code2 == LT_EXPR || code2 == LE_EXPR))
4367 return boolean_false_node;
4370 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4371 NAME's definition is a truth value. See if there are any simplifications
4372 that can be done against the NAME's definition. */
4373 if (TREE_CODE (op1a) == SSA_NAME
4374 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4375 && (integer_zerop (op1b) || integer_onep (op1b)))
4377 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4378 || (code1 == NE_EXPR && integer_onep (op1b)));
4379 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4380 switch (gimple_code (stmt))
4382 case GIMPLE_ASSIGN:
4383 /* Try to simplify by copy-propagating the definition. */
4384 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4386 case GIMPLE_PHI:
4387 /* If every argument to the PHI produces the same result when
4388 ANDed with the second comparison, we win.
4389 Do not do this unless the type is bool since we need a bool
4390 result here anyway. */
4391 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4393 tree result = NULL_TREE;
4394 unsigned i;
4395 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4397 tree arg = gimple_phi_arg_def (stmt, i);
4399 /* If this PHI has itself as an argument, ignore it.
4400 If all the other args produce the same result,
4401 we're still OK. */
4402 if (arg == gimple_phi_result (stmt))
4403 continue;
4404 else if (TREE_CODE (arg) == INTEGER_CST)
4406 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4408 if (!result)
4409 result = boolean_false_node;
4410 else if (!integer_zerop (result))
4411 return NULL_TREE;
4413 else if (!result)
4414 result = fold_build2 (code2, boolean_type_node,
4415 op2a, op2b);
4416 else if (!same_bool_comparison_p (result,
4417 code2, op2a, op2b))
4418 return NULL_TREE;
4420 else if (TREE_CODE (arg) == SSA_NAME
4421 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4423 tree temp;
4424 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4425 /* In simple cases we can look through PHI nodes,
4426 but we have to be careful with loops.
4427 See PR49073. */
4428 if (! dom_info_available_p (CDI_DOMINATORS)
4429 || gimple_bb (def_stmt) == gimple_bb (stmt)
4430 || dominated_by_p (CDI_DOMINATORS,
4431 gimple_bb (def_stmt),
4432 gimple_bb (stmt)))
4433 return NULL_TREE;
4434 temp = and_var_with_comparison (arg, invert, code2,
4435 op2a, op2b);
4436 if (!temp)
4437 return NULL_TREE;
4438 else if (!result)
4439 result = temp;
4440 else if (!same_bool_result_p (result, temp))
4441 return NULL_TREE;
4443 else
4444 return NULL_TREE;
4446 return result;
4449 default:
4450 break;
4453 return NULL_TREE;
4456 /* Try to simplify the AND of two comparisons, specified by
4457 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4458 If this can be simplified to a single expression (without requiring
4459 introducing more SSA variables to hold intermediate values),
4460 return the resulting tree. Otherwise return NULL_TREE.
4461 If the result expression is non-null, it has boolean type. */
4463 tree
4464 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4465 enum tree_code code2, tree op2a, tree op2b)
4467 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4468 if (t)
4469 return t;
4470 else
4471 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4474 /* Helper function for or_comparisons_1: try to simplify the OR of the
4475 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4476 If INVERT is true, invert the value of VAR before doing the OR.
4477 Return NULL_EXPR if we can't simplify this to a single expression. */
4479 static tree
4480 or_var_with_comparison (tree var, bool invert,
4481 enum tree_code code2, tree op2a, tree op2b)
4483 tree t;
4484 gimple *stmt = SSA_NAME_DEF_STMT (var);
4486 /* We can only deal with variables whose definitions are assignments. */
4487 if (!is_gimple_assign (stmt))
4488 return NULL_TREE;
4490 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4491 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4492 Then we only have to consider the simpler non-inverted cases. */
4493 if (invert)
4494 t = and_var_with_comparison_1 (stmt,
4495 invert_tree_comparison (code2, false),
4496 op2a, op2b);
4497 else
4498 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4499 return canonicalize_bool (t, invert);
4502 /* Try to simplify the OR of the ssa variable defined by the assignment
4503 STMT with the comparison specified by (OP2A CODE2 OP2B).
4504 Return NULL_EXPR if we can't simplify this to a single expression. */
4506 static tree
4507 or_var_with_comparison_1 (gimple *stmt,
4508 enum tree_code code2, tree op2a, tree op2b)
4510 tree var = gimple_assign_lhs (stmt);
4511 tree true_test_var = NULL_TREE;
4512 tree false_test_var = NULL_TREE;
4513 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4515 /* Check for identities like (var OR (var != 0)) => true . */
4516 if (TREE_CODE (op2a) == SSA_NAME
4517 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4519 if ((code2 == NE_EXPR && integer_zerop (op2b))
4520 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4522 true_test_var = op2a;
4523 if (var == true_test_var)
4524 return var;
4526 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4527 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4529 false_test_var = op2a;
4530 if (var == false_test_var)
4531 return boolean_true_node;
4535 /* If the definition is a comparison, recurse on it. */
4536 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4538 tree t = or_comparisons_1 (innercode,
4539 gimple_assign_rhs1 (stmt),
4540 gimple_assign_rhs2 (stmt),
4541 code2,
4542 op2a,
4543 op2b);
4544 if (t)
4545 return t;
4548 /* If the definition is an AND or OR expression, we may be able to
4549 simplify by reassociating. */
4550 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4551 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4553 tree inner1 = gimple_assign_rhs1 (stmt);
4554 tree inner2 = gimple_assign_rhs2 (stmt);
4555 gimple *s;
4556 tree t;
4557 tree partial = NULL_TREE;
4558 bool is_or = (innercode == BIT_IOR_EXPR);
4560 /* Check for boolean identities that don't require recursive examination
4561 of inner1/inner2:
4562 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4563 inner1 OR (inner1 AND inner2) => inner1
4564 !inner1 OR (inner1 OR inner2) => true
4565 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4567 if (inner1 == true_test_var)
4568 return (is_or ? var : inner1);
4569 else if (inner2 == true_test_var)
4570 return (is_or ? var : inner2);
4571 else if (inner1 == false_test_var)
4572 return (is_or
4573 ? boolean_true_node
4574 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4575 else if (inner2 == false_test_var)
4576 return (is_or
4577 ? boolean_true_node
4578 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4580 /* Next, redistribute/reassociate the OR across the inner tests.
4581 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4582 if (TREE_CODE (inner1) == SSA_NAME
4583 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4584 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4585 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4586 gimple_assign_rhs1 (s),
4587 gimple_assign_rhs2 (s),
4588 code2, op2a, op2b)))
4590 /* Handle the OR case, where we are reassociating:
4591 (inner1 OR inner2) OR (op2a code2 op2b)
4592 => (t OR inner2)
4593 If the partial result t is a constant, we win. Otherwise
4594 continue on to try reassociating with the other inner test. */
4595 if (is_or)
4597 if (integer_onep (t))
4598 return boolean_true_node;
4599 else if (integer_zerop (t))
4600 return inner2;
4603 /* Handle the AND case, where we are redistributing:
4604 (inner1 AND inner2) OR (op2a code2 op2b)
4605 => (t AND (inner2 OR (op2a code op2b))) */
4606 else if (integer_zerop (t))
4607 return boolean_false_node;
4609 /* Save partial result for later. */
4610 partial = t;
4613 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4614 if (TREE_CODE (inner2) == SSA_NAME
4615 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4616 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4617 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4618 gimple_assign_rhs1 (s),
4619 gimple_assign_rhs2 (s),
4620 code2, op2a, op2b)))
4622 /* Handle the OR case, where we are reassociating:
4623 (inner1 OR inner2) OR (op2a code2 op2b)
4624 => (inner1 OR t)
4625 => (t OR partial) */
4626 if (is_or)
4628 if (integer_zerop (t))
4629 return inner1;
4630 else if (integer_onep (t))
4631 return boolean_true_node;
4632 /* If both are the same, we can apply the identity
4633 (x OR x) == x. */
4634 else if (partial && same_bool_result_p (t, partial))
4635 return t;
4638 /* Handle the AND case, where we are redistributing:
4639 (inner1 AND inner2) OR (op2a code2 op2b)
4640 => (t AND (inner1 OR (op2a code2 op2b)))
4641 => (t AND partial) */
4642 else
4644 if (integer_zerop (t))
4645 return boolean_false_node;
4646 else if (partial)
4648 /* We already got a simplification for the other
4649 operand to the redistributed AND expression. The
4650 interesting case is when at least one is true.
4651 Or, if both are the same, we can apply the identity
4652 (x AND x) == x. */
4653 if (integer_onep (partial))
4654 return t;
4655 else if (integer_onep (t))
4656 return partial;
4657 else if (same_bool_result_p (t, partial))
4658 return t;
4663 return NULL_TREE;
4666 /* Try to simplify the OR of two comparisons defined by
4667 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4668 If this can be done without constructing an intermediate value,
4669 return the resulting tree; otherwise NULL_TREE is returned.
4670 This function is deliberately asymmetric as it recurses on SSA_DEFs
4671 in the first comparison but not the second. */
4673 static tree
4674 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4675 enum tree_code code2, tree op2a, tree op2b)
4677 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4679 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4680 if (operand_equal_p (op1a, op2a, 0)
4681 && operand_equal_p (op1b, op2b, 0))
4683 /* Result will be either NULL_TREE, or a combined comparison. */
4684 tree t = combine_comparisons (UNKNOWN_LOCATION,
4685 TRUTH_ORIF_EXPR, code1, code2,
4686 truth_type, op1a, op1b);
4687 if (t)
4688 return t;
4691 /* Likewise the swapped case of the above. */
4692 if (operand_equal_p (op1a, op2b, 0)
4693 && operand_equal_p (op1b, op2a, 0))
4695 /* Result will be either NULL_TREE, or a combined comparison. */
4696 tree t = combine_comparisons (UNKNOWN_LOCATION,
4697 TRUTH_ORIF_EXPR, code1,
4698 swap_tree_comparison (code2),
4699 truth_type, op1a, op1b);
4700 if (t)
4701 return t;
4704 /* If both comparisons are of the same value against constants, we might
4705 be able to merge them. */
4706 if (operand_equal_p (op1a, op2a, 0)
4707 && TREE_CODE (op1b) == INTEGER_CST
4708 && TREE_CODE (op2b) == INTEGER_CST)
4710 int cmp = tree_int_cst_compare (op1b, op2b);
4712 /* If we have (op1a != op1b), we should either be able to
4713 return that or TRUE, depending on whether the constant op1b
4714 also satisfies the other comparison against op2b. */
4715 if (code1 == NE_EXPR)
4717 bool done = true;
4718 bool val;
4719 switch (code2)
4721 case EQ_EXPR: val = (cmp == 0); break;
4722 case NE_EXPR: val = (cmp != 0); break;
4723 case LT_EXPR: val = (cmp < 0); break;
4724 case GT_EXPR: val = (cmp > 0); break;
4725 case LE_EXPR: val = (cmp <= 0); break;
4726 case GE_EXPR: val = (cmp >= 0); break;
4727 default: done = false;
4729 if (done)
4731 if (val)
4732 return boolean_true_node;
4733 else
4734 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4737 /* Likewise if the second comparison is a != comparison. */
4738 else if (code2 == NE_EXPR)
4740 bool done = true;
4741 bool val;
4742 switch (code1)
4744 case EQ_EXPR: val = (cmp == 0); break;
4745 case NE_EXPR: val = (cmp != 0); break;
4746 case LT_EXPR: val = (cmp > 0); break;
4747 case GT_EXPR: val = (cmp < 0); break;
4748 case LE_EXPR: val = (cmp >= 0); break;
4749 case GE_EXPR: val = (cmp <= 0); break;
4750 default: done = false;
4752 if (done)
4754 if (val)
4755 return boolean_true_node;
4756 else
4757 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4761 /* See if an equality test is redundant with the other comparison. */
4762 else if (code1 == EQ_EXPR)
4764 bool val;
4765 switch (code2)
4767 case EQ_EXPR: val = (cmp == 0); break;
4768 case NE_EXPR: val = (cmp != 0); break;
4769 case LT_EXPR: val = (cmp < 0); break;
4770 case GT_EXPR: val = (cmp > 0); break;
4771 case LE_EXPR: val = (cmp <= 0); break;
4772 case GE_EXPR: val = (cmp >= 0); break;
4773 default:
4774 val = false;
4776 if (val)
4777 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4779 else if (code2 == EQ_EXPR)
4781 bool val;
4782 switch (code1)
4784 case EQ_EXPR: val = (cmp == 0); break;
4785 case NE_EXPR: val = (cmp != 0); break;
4786 case LT_EXPR: val = (cmp > 0); break;
4787 case GT_EXPR: val = (cmp < 0); break;
4788 case LE_EXPR: val = (cmp >= 0); break;
4789 case GE_EXPR: val = (cmp <= 0); break;
4790 default:
4791 val = false;
4793 if (val)
4794 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4797 /* Chose the less restrictive of two < or <= comparisons. */
4798 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4799 && (code2 == LT_EXPR || code2 == LE_EXPR))
4801 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4802 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4803 else
4804 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4807 /* Likewise chose the less restrictive of two > or >= comparisons. */
4808 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4809 && (code2 == GT_EXPR || code2 == GE_EXPR))
4811 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4812 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4813 else
4814 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4817 /* Check for singleton ranges. */
4818 else if (cmp == 0
4819 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4820 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4821 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4823 /* Check for less/greater pairs that don't restrict the range at all. */
4824 else if (cmp >= 0
4825 && (code1 == LT_EXPR || code1 == LE_EXPR)
4826 && (code2 == GT_EXPR || code2 == GE_EXPR))
4827 return boolean_true_node;
4828 else if (cmp <= 0
4829 && (code1 == GT_EXPR || code1 == GE_EXPR)
4830 && (code2 == LT_EXPR || code2 == LE_EXPR))
4831 return boolean_true_node;
4834 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4835 NAME's definition is a truth value. See if there are any simplifications
4836 that can be done against the NAME's definition. */
4837 if (TREE_CODE (op1a) == SSA_NAME
4838 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4839 && (integer_zerop (op1b) || integer_onep (op1b)))
4841 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4842 || (code1 == NE_EXPR && integer_onep (op1b)));
4843 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4844 switch (gimple_code (stmt))
4846 case GIMPLE_ASSIGN:
4847 /* Try to simplify by copy-propagating the definition. */
4848 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4850 case GIMPLE_PHI:
4851 /* If every argument to the PHI produces the same result when
4852 ORed with the second comparison, we win.
4853 Do not do this unless the type is bool since we need a bool
4854 result here anyway. */
4855 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4857 tree result = NULL_TREE;
4858 unsigned i;
4859 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4861 tree arg = gimple_phi_arg_def (stmt, i);
4863 /* If this PHI has itself as an argument, ignore it.
4864 If all the other args produce the same result,
4865 we're still OK. */
4866 if (arg == gimple_phi_result (stmt))
4867 continue;
4868 else if (TREE_CODE (arg) == INTEGER_CST)
4870 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4872 if (!result)
4873 result = boolean_true_node;
4874 else if (!integer_onep (result))
4875 return NULL_TREE;
4877 else if (!result)
4878 result = fold_build2 (code2, boolean_type_node,
4879 op2a, op2b);
4880 else if (!same_bool_comparison_p (result,
4881 code2, op2a, op2b))
4882 return NULL_TREE;
4884 else if (TREE_CODE (arg) == SSA_NAME
4885 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4887 tree temp;
4888 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4889 /* In simple cases we can look through PHI nodes,
4890 but we have to be careful with loops.
4891 See PR49073. */
4892 if (! dom_info_available_p (CDI_DOMINATORS)
4893 || gimple_bb (def_stmt) == gimple_bb (stmt)
4894 || dominated_by_p (CDI_DOMINATORS,
4895 gimple_bb (def_stmt),
4896 gimple_bb (stmt)))
4897 return NULL_TREE;
4898 temp = or_var_with_comparison (arg, invert, code2,
4899 op2a, op2b);
4900 if (!temp)
4901 return NULL_TREE;
4902 else if (!result)
4903 result = temp;
4904 else if (!same_bool_result_p (result, temp))
4905 return NULL_TREE;
4907 else
4908 return NULL_TREE;
4910 return result;
4913 default:
4914 break;
4917 return NULL_TREE;
4920 /* Try to simplify the OR of two comparisons, specified by
4921 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4922 If this can be simplified to a single expression (without requiring
4923 introducing more SSA variables to hold intermediate values),
4924 return the resulting tree. Otherwise return NULL_TREE.
4925 If the result expression is non-null, it has boolean type. */
4927 tree
4928 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4929 enum tree_code code2, tree op2a, tree op2b)
4931 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4932 if (t)
4933 return t;
4934 else
4935 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4939 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4941 Either NULL_TREE, a simplified but non-constant or a constant
4942 is returned.
4944 ??? This should go into a gimple-fold-inline.h file to be eventually
4945 privatized with the single valueize function used in the various TUs
4946 to avoid the indirect function call overhead. */
4948 tree
4949 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
4950 tree (*gvalueize) (tree))
4952 code_helper rcode;
4953 tree ops[3] = {};
4954 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4955 edges if there are intermediate VARYING defs. For this reason
4956 do not follow SSA edges here even though SCCVN can technically
4957 just deal fine with that. */
4958 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
4960 tree res = NULL_TREE;
4961 if (gimple_simplified_result_is_gimple_val (rcode, ops))
4962 res = ops[0];
4963 else if (mprts_hook)
4964 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
4965 if (res)
4967 if (dump_file && dump_flags & TDF_DETAILS)
4969 fprintf (dump_file, "Match-and-simplified ");
4970 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4971 fprintf (dump_file, " to ");
4972 print_generic_expr (dump_file, res, 0);
4973 fprintf (dump_file, "\n");
4975 return res;
4979 location_t loc = gimple_location (stmt);
4980 switch (gimple_code (stmt))
4982 case GIMPLE_ASSIGN:
4984 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4986 switch (get_gimple_rhs_class (subcode))
4988 case GIMPLE_SINGLE_RHS:
4990 tree rhs = gimple_assign_rhs1 (stmt);
4991 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4993 if (TREE_CODE (rhs) == SSA_NAME)
4995 /* If the RHS is an SSA_NAME, return its known constant value,
4996 if any. */
4997 return (*valueize) (rhs);
4999 /* Handle propagating invariant addresses into address
5000 operations. */
5001 else if (TREE_CODE (rhs) == ADDR_EXPR
5002 && !is_gimple_min_invariant (rhs))
5004 HOST_WIDE_INT offset = 0;
5005 tree base;
5006 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5007 &offset,
5008 valueize);
5009 if (base
5010 && (CONSTANT_CLASS_P (base)
5011 || decl_address_invariant_p (base)))
5012 return build_invariant_address (TREE_TYPE (rhs),
5013 base, offset);
5015 else if (TREE_CODE (rhs) == CONSTRUCTOR
5016 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5017 && (CONSTRUCTOR_NELTS (rhs)
5018 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5020 unsigned i;
5021 tree val, *vec;
5023 vec = XALLOCAVEC (tree,
5024 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5025 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5027 val = (*valueize) (val);
5028 if (TREE_CODE (val) == INTEGER_CST
5029 || TREE_CODE (val) == REAL_CST
5030 || TREE_CODE (val) == FIXED_CST)
5031 vec[i] = val;
5032 else
5033 return NULL_TREE;
5036 return build_vector (TREE_TYPE (rhs), vec);
5038 if (subcode == OBJ_TYPE_REF)
5040 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5041 /* If callee is constant, we can fold away the wrapper. */
5042 if (is_gimple_min_invariant (val))
5043 return val;
5046 if (kind == tcc_reference)
5048 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5049 || TREE_CODE (rhs) == REALPART_EXPR
5050 || TREE_CODE (rhs) == IMAGPART_EXPR)
5051 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5053 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5054 return fold_unary_loc (EXPR_LOCATION (rhs),
5055 TREE_CODE (rhs),
5056 TREE_TYPE (rhs), val);
5058 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5059 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5061 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5062 return fold_ternary_loc (EXPR_LOCATION (rhs),
5063 TREE_CODE (rhs),
5064 TREE_TYPE (rhs), val,
5065 TREE_OPERAND (rhs, 1),
5066 TREE_OPERAND (rhs, 2));
5068 else if (TREE_CODE (rhs) == MEM_REF
5069 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5071 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5072 if (TREE_CODE (val) == ADDR_EXPR
5073 && is_gimple_min_invariant (val))
5075 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5076 unshare_expr (val),
5077 TREE_OPERAND (rhs, 1));
5078 if (tem)
5079 rhs = tem;
5082 return fold_const_aggregate_ref_1 (rhs, valueize);
5084 else if (kind == tcc_declaration)
5085 return get_symbol_constant_value (rhs);
5086 return rhs;
5089 case GIMPLE_UNARY_RHS:
5090 return NULL_TREE;
5092 case GIMPLE_BINARY_RHS:
5093 /* Translate &x + CST into an invariant form suitable for
5094 further propagation. */
5095 if (subcode == POINTER_PLUS_EXPR)
5097 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5098 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5099 if (TREE_CODE (op0) == ADDR_EXPR
5100 && TREE_CODE (op1) == INTEGER_CST)
5102 tree off = fold_convert (ptr_type_node, op1);
5103 return build_fold_addr_expr_loc
5104 (loc,
5105 fold_build2 (MEM_REF,
5106 TREE_TYPE (TREE_TYPE (op0)),
5107 unshare_expr (op0), off));
5110 /* Canonicalize bool != 0 and bool == 0 appearing after
5111 valueization. While gimple_simplify handles this
5112 it can get confused by the ~X == 1 -> X == 0 transform
5113 which we cant reduce to a SSA name or a constant
5114 (and we have no way to tell gimple_simplify to not
5115 consider those transforms in the first place). */
5116 else if (subcode == EQ_EXPR
5117 || subcode == NE_EXPR)
5119 tree lhs = gimple_assign_lhs (stmt);
5120 tree op0 = gimple_assign_rhs1 (stmt);
5121 if (useless_type_conversion_p (TREE_TYPE (lhs),
5122 TREE_TYPE (op0)))
5124 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5125 op0 = (*valueize) (op0);
5126 if (TREE_CODE (op0) == INTEGER_CST)
5127 std::swap (op0, op1);
5128 if (TREE_CODE (op1) == INTEGER_CST
5129 && ((subcode == NE_EXPR && integer_zerop (op1))
5130 || (subcode == EQ_EXPR && integer_onep (op1))))
5131 return op0;
5134 return NULL_TREE;
5136 case GIMPLE_TERNARY_RHS:
5138 /* Handle ternary operators that can appear in GIMPLE form. */
5139 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5140 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5141 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5142 return fold_ternary_loc (loc, subcode,
5143 gimple_expr_type (stmt), op0, op1, op2);
5146 default:
5147 gcc_unreachable ();
5151 case GIMPLE_CALL:
5153 tree fn;
5154 gcall *call_stmt = as_a <gcall *> (stmt);
5156 if (gimple_call_internal_p (stmt))
5158 enum tree_code subcode = ERROR_MARK;
5159 switch (gimple_call_internal_fn (stmt))
5161 case IFN_UBSAN_CHECK_ADD:
5162 subcode = PLUS_EXPR;
5163 break;
5164 case IFN_UBSAN_CHECK_SUB:
5165 subcode = MINUS_EXPR;
5166 break;
5167 case IFN_UBSAN_CHECK_MUL:
5168 subcode = MULT_EXPR;
5169 break;
5170 default:
5171 return NULL_TREE;
5173 tree arg0 = gimple_call_arg (stmt, 0);
5174 tree arg1 = gimple_call_arg (stmt, 1);
5175 tree op0 = (*valueize) (arg0);
5176 tree op1 = (*valueize) (arg1);
5178 if (TREE_CODE (op0) != INTEGER_CST
5179 || TREE_CODE (op1) != INTEGER_CST)
5181 switch (subcode)
5183 case MULT_EXPR:
5184 /* x * 0 = 0 * x = 0 without overflow. */
5185 if (integer_zerop (op0) || integer_zerop (op1))
5186 return build_zero_cst (TREE_TYPE (arg0));
5187 break;
5188 case MINUS_EXPR:
5189 /* y - y = 0 without overflow. */
5190 if (operand_equal_p (op0, op1, 0))
5191 return build_zero_cst (TREE_TYPE (arg0));
5192 break;
5193 default:
5194 break;
5197 tree res
5198 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5199 if (res
5200 && TREE_CODE (res) == INTEGER_CST
5201 && !TREE_OVERFLOW (res))
5202 return res;
5203 return NULL_TREE;
5206 fn = (*valueize) (gimple_call_fn (stmt));
5207 if (TREE_CODE (fn) == ADDR_EXPR
5208 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5209 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5210 && gimple_builtin_call_types_compatible_p (stmt,
5211 TREE_OPERAND (fn, 0)))
5213 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5214 tree retval;
5215 unsigned i;
5216 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5217 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5218 retval = fold_builtin_call_array (loc,
5219 gimple_call_return_type (call_stmt),
5220 fn, gimple_call_num_args (stmt), args);
5221 if (retval)
5223 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5224 STRIP_NOPS (retval);
5225 retval = fold_convert (gimple_call_return_type (call_stmt),
5226 retval);
5228 return retval;
5230 return NULL_TREE;
5233 default:
5234 return NULL_TREE;
5238 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5239 Returns NULL_TREE if folding to a constant is not possible, otherwise
5240 returns a constant according to is_gimple_min_invariant. */
5242 tree
5243 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5245 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5246 if (res && is_gimple_min_invariant (res))
5247 return res;
5248 return NULL_TREE;
5252 /* The following set of functions are supposed to fold references using
5253 their constant initializers. */
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant byte offset into *BIT_OFFSET and
	 look through the address it dereferences.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      /* Only a taken address can lead us to a constructor.  */
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Recurse into the containing object with the reference's
	 constant bit offset accumulated into *BIT_OFFSET.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      /* Give up on variable-sized or variable-position accesses.  */
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  Returns the constant value of the
   accessed element, a zero constant when the constructor leaves the element
   implicit, or NULL_TREE when the access cannot be resolved.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   Returns the folded constant, a zero constant when the field is not
   mentioned in the constructor, or NULL_TREE when the access spans
   multiple fields or starts before the matching field.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	/* Flexible array member; its end is unknown.  */
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  Dispatches to the array or non-array
   constructor folders, or resolves the value directly for exact matches
   and byte-aligned constant reads.  Returns NULL_TREE on failure.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Vectors are folded with the array machinery; everything else
	 (records, unions, complex) goes through the field walker.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold away volatile accesses.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Sign-extend the zero-based index in the index type's
		 precision before scaling to a bit offset.  */
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the operand to a COMPLEX_CST first, then extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
5622 tree
5623 fold_const_aggregate_ref (tree t)
5625 return fold_const_aggregate_ref_1 (t, NULL);
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Missing ctor can only happen when streaming in LTO mode.  */
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
5739 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5740 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5741 KNOWN_BINFO carries the binfo describing the true type of
5742 OBJ_TYPE_REF_OBJECT(REF).
5743 Set CAN_REFER if non-NULL to false if method
5744 is not referable or if the virtual table is ill-formed (such as rewriten
5745 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5747 tree
5748 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5749 bool *can_refer)
5751 unsigned HOST_WIDE_INT offset;
5752 tree v;
5754 v = BINFO_VTABLE (known_binfo);
5755 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5756 if (!v)
5757 return NULL_TREE;
5759 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5761 if (can_refer)
5762 *can_refer = false;
5763 return NULL_TREE;
5765 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold if the access stays inside the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
5888 /* Return true if CODE is an operation that when operating on signed
5889 integer types involves undefined behavior on overflow and the
5890 operation can be expressed with unsigned arithmetic. */
5892 bool
5893 arith_code_with_undefined_signed_overflow (tree_code code)
5895 switch (code)
5897 case PLUS_EXPR:
5898 case MINUS_EXPR:
5899 case MULT_EXPR:
5900 case NEGATE_EXPR:
5901 case POINTER_PLUS_EXPR:
5902 return true;
5903 default:
5904 return false;
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each rhs operand of STMT to the unsigned type; the
     conversion statements accumulate in STMTS before STMT itself.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Give STMT a fresh unsigned lhs; the original lhs is re-created by
     the final conversion back.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR has no meaning on the unsigned integer type;
     carry it out as a plain addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
5946 /* The valueization hook we use for the gimple_build API simplification.
5947 This makes us match fold_buildN behavior by only combining with
5948 statements in the sequence(s) we are currently building. */
5950 static tree
5951 gimple_build_valueize (tree op)
5953 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
5954 return op;
5955 return NULL_TREE;
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple *stmt;
      /* These codes are reference-like and must be wrapped in a tree
	 node rather than used as a plain assign rhs code.  */
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
5987 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
5988 simplifying it first if possible. Returns the built
5989 expression value and appends statements possibly defining it
5990 to SEQ. */
5992 tree
5993 gimple_build (gimple_seq *seq, location_t loc,
5994 enum tree_code code, tree type, tree op0, tree op1)
5996 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
5997 if (!res)
5999 if (gimple_in_ssa_p (cfun))
6000 res = make_ssa_name (type);
6001 else
6002 res = create_tmp_reg (type);
6003 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6004 gimple_set_location (stmt, loc);
6005 gimple_seq_add_stmt_without_update (seq, stmt);
6007 return res;
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple *stmt;
      /* BIT_FIELD_REF is reference-like and must be wrapped in a tree
	 node rather than used as a plain assign rhs code.  */
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 1, arg0);
      /* A void call has no lhs; RES stays NULL_TREE.  */
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
      /* A void call has no lhs; RES stays NULL_TREE.  */
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type,
	      tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      /* A void call has no lhs; RES stays NULL_TREE.  */
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
6128 /* Build the conversion (TYPE) OP with a result of type TYPE
6129 with location LOC if such conversion is neccesary in GIMPLE,
6130 simplifying it first.
6131 Returns the built expression value and appends
6132 statements possibly defining it to SEQ. */
6134 tree
6135 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6137 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6138 return op;
6139 return gimple_build (seq, loc, NOP_EXPR, type, op);
6142 /* Build the conversion (ptrofftype) OP with a result of a type
6143 compatible with ptrofftype with location LOC if such conversion
6144 is neccesary in GIMPLE, simplifying it first.
6145 Returns the built expression value and appends
6146 statements possibly defining it to SEQ. */
6148 tree
6149 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6151 if (ptrofftype_p (TREE_TYPE (op)))
6152 return op;
6153 return gimple_convert (seq, loc, sizetype, op);
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  /* Dispatch on the rhs class to the matching fold-const helper.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      /* No ternary helper exists; conservatively answer false.  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
6190 /* Return true if return value of call STMT is known to be non-negative.
6191 If the return value is based on the assumption that signed overflow is
6192 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6193 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6195 static bool
6196 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6197 int depth)
6199 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6200 gimple_call_arg (stmt, 0) : NULL_TREE;
6201 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6202 gimple_call_arg (stmt, 1) : NULL_TREE;
6204 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6205 gimple_call_combined_fn (stmt),
6206 arg0,
6207 arg1,
6208 strict_overflow_p, depth);
6211 /* Return true if return value of call STMT is known to be non-negative.
6212 If the return value is based on the assumption that signed overflow is
6213 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6214 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6216 static bool
6217 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6218 int depth)
6220 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6222 tree arg = gimple_phi_arg_def (stmt, i);
6223 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6224 return false;
6226 return true;
6229 /* Return true if STMT is known to compute a non-negative value.
6230 If the return value is based on the assumption that signed overflow is
6231 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6232 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6234 bool
6235 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6236 int depth)
6238 switch (gimple_code (stmt))
6240 case GIMPLE_ASSIGN:
6241 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6242 depth);
6243 case GIMPLE_CALL:
6244 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6245 depth);
6246 case GIMPLE_PHI:
6247 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6248 depth);
6249 default:
6250 return false;
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  /* Dispatch on the rhs class to the matching fold-const helper.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      /* No ternary helper exists; conservatively answer false.  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
6283 /* Return true if the floating-point value computed by call STMT is known
6284 to have an integer value. We also allow +Inf, -Inf and NaN to be
6285 considered integer values.
6287 DEPTH is the current nesting depth of the query. */
6289 static bool
6290 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6292 tree arg0 = (gimple_call_num_args (stmt) > 0
6293 ? gimple_call_arg (stmt, 0)
6294 : NULL_TREE);
6295 tree arg1 = (gimple_call_num_args (stmt) > 1
6296 ? gimple_call_arg (stmt, 1)
6297 : NULL_TREE);
6298 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
6299 arg0, arg1, depth);
6302 /* Return true if the floating-point result of phi STMT is known to have
6303 an integer value. We also allow +Inf, -Inf and NaN to be considered
6304 integer values.
6306 DEPTH is the current nesting depth of the query. */
6308 static bool
6309 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6311 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6313 tree arg = gimple_phi_arg_def (stmt, i);
6314 if (!integer_valued_real_single_p (arg, depth + 1))
6315 return false;
6317 return true;
6320 /* Return true if the floating-point value computed by STMT is known
6321 to have an integer value. We also allow +Inf, -Inf and NaN to be
6322 considered integer values.
6324 DEPTH is the current nesting depth of the query. */
6326 bool
6327 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6329 switch (gimple_code (stmt))
6331 case GIMPLE_ASSIGN:
6332 return gimple_assign_integer_valued_real_p (stmt, depth);
6333 case GIMPLE_CALL:
6334 return gimple_call_integer_valued_real_p (stmt, depth);
6335 case GIMPLE_PHI:
6336 return gimple_phi_integer_valued_real_p (stmt, depth);
6337 default:
6338 return false;