Wno-frame-address.c: Skip on hppa*-*-*.
[official-gcc.git] / gcc / gimple-fold.c
blob45840af22d90935756dc972af9f680c0654f06eb
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "dbgcnt.h"
50 #include "builtins.h"
51 #include "tree-eh.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
55 #include "omp-low.h"
58 /* Return true when DECL can be referenced from current unit.
59 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
60 We can get declarations that are not possible to reference for various
61 reasons:
63 1) When analyzing C++ virtual tables.
64 C++ virtual tables do have known constructors even
65 when they are keyed to other compilation unit.
66 Those tables can contain pointers to methods and vars
67 in other units. Those methods have both STATIC and EXTERNAL
68 set.
69 2) In WHOPR mode devirtualization might lead to reference
70 to method that was partitioned elsewhere.
71 In this case we have static VAR_DECL or FUNCTION_DECL
72 that has no corresponding callgraph/varpool node
73 declaring the body.
74 3) COMDAT functions referred by external vtables that
75 we devirtualize only during final compilation stage.
76 At this time we already decided that we will not output
77 the function body and thus we can't reference the symbol
78 directly. */
80 static bool
81 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
83 varpool_node *vnode;
84 struct cgraph_node *node;
85 symtab_node *snode;
/* Abstract declarations (e.g. cloned-away originals kept for debug info)
   have no body or storage to reference.  */
87 if (DECL_ABSTRACT_P (decl))
88 return false;
90 /* We are concerned only about static/external vars and functions. */
91 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
92 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
93 return true;
95 /* Static objects can be referred only if they were not optimized out yet. */
96 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
98 /* Before we start optimizing unreachable code we can be sure all
99 static objects are defined. */
100 if (symtab->function_flags_ready)
101 return true;
102 snode = symtab_node::get (decl);
103 if (!snode || !snode->definition)
104 return false;
/* A definition inlined into all callers has no standalone body left.  */
105 node = dyn_cast <cgraph_node *> (snode);
106 return !node || !node->global.inlined_to;
109 /* We will later output the initializer, so we can refer to it.
110 So we are concerned only when DECL comes from initializer of
111 external var or var that has been optimized out. */
112 if (!from_decl
113 || TREE_CODE (from_decl) != VAR_DECL
114 || (!DECL_EXTERNAL (from_decl)
115 && (vnode = varpool_node::get (from_decl)) != NULL
116 && vnode->definition)
117 || (flag_ltrans
118 && (vnode = varpool_node::get (from_decl)) != NULL
119 && vnode->in_other_partition))
120 return true;
121 /* We are folding reference from external vtable. The vtable may refer
122 to a symbol keyed to other compilation unit. The other compilation
123 unit may be in separate DSO and the symbol may be hidden. */
124 if (DECL_VISIBILITY_SPECIFIED (decl)
125 && DECL_EXTERNAL (decl)
126 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
127 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
128 return false;
129 /* When function is public, we always can introduce new reference.
130 Exception are the COMDAT functions where introducing a direct
131 reference imply need to include function body in the current unit. */
132 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
133 return true;
134 /* We have COMDAT. We are going to check if we still have definition
135 or if the definition is going to be output in other partition.
136 Bypass this when gimplifying; all needed functions will be produced.
138 As observed in PR20991 for already optimized out comdat virtual functions
139 it may be tempting to not necessarily give up because the copy will be
140 output elsewhere when corresponding vtable is output.
141 This is however not possible - ABI specify that COMDATs are output in
142 units where they are used and when the other unit was compiled with LTO
143 it is possible that vtable was kept public while the function itself
144 was privatized. */
145 if (!symtab->function_flags_ready)
146 return true;
148 snode = symtab_node::get (decl);
149 if (!snode
150 || ((!snode->definition || DECL_EXTERNAL (decl))
151 && (!snode->in_other_partition
152 || (!snode->forced_by_abi && !snode->force_output))))
153 return false;
/* As above: an everywhere-inlined function cannot be referenced directly.  */
154 node = dyn_cast <cgraph_node *> (snode);
155 return !node || !node->global.inlined_to;
158 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
159 acceptable form for is_gimple_min_invariant.
160 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE when the value cannot be
   legally referenced from the current unit.  */
162 tree
163 canonicalize_constructor_val (tree cval, tree from_decl)
165 tree orig_cval = cval;
166 STRIP_NOPS (cval);
/* Rewrite PTR p+ CST into &MEM_REF[ptr, CST] so the result is a single
   ADDR_EXPR that is_gimple_min_invariant accepts.  */
167 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
168 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
170 tree ptr = TREE_OPERAND (cval, 0);
171 if (is_gimple_min_invariant (ptr))
172 cval = build1_loc (EXPR_LOCATION (cval),
173 ADDR_EXPR, TREE_TYPE (ptr),
174 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
175 ptr,
176 fold_convert (ptr_type_node,
177 TREE_OPERAND (cval, 1))));
179 if (TREE_CODE (cval) == ADDR_EXPR)
181 tree base = NULL_TREE;
/* A compound literal decays to its anonymous backing decl.  */
182 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
184 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
185 if (base)
186 TREE_OPERAND (cval, 0) = base;
188 else
189 base = get_base_address (TREE_OPERAND (cval, 0));
190 if (!base)
191 return NULL_TREE;
/* Refuse addresses of symbols this unit is not allowed to reference
   (see can_refer_decl_in_current_unit_p above).  */
193 if ((TREE_CODE (base) == VAR_DECL
194 || TREE_CODE (base) == FUNCTION_DECL)
195 && !can_refer_decl_in_current_unit_p (base, from_decl))
196 return NULL_TREE;
197 if (TREE_CODE (base) == VAR_DECL)
198 TREE_ADDRESSABLE (base) = 1;
199 else if (TREE_CODE (base) == FUNCTION_DECL)
201 /* Make sure we create a cgraph node for functions we'll reference.
202 They can be non-existent if the reference comes from an entry
203 of an external vtable for example. */
204 cgraph_node::get_create (base);
206 /* Fixup types in global initializers. */
207 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
208 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
/* Preserve the type callers expect: convert back to the original type.  */
210 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
211 cval = fold_convert (TREE_TYPE (orig_cval), cval);
212 return cval;
214 if (TREE_OVERFLOW_P (cval))
215 return drop_tree_overflow (cval);
216 return orig_cval;
219 /* If SYM is a constant variable with known value, return the value.
220 NULL_TREE is returned otherwise.
   ctor_for_folding returns error_mark_node when the initializer must not
   be used for folding at all; in that case we give up immediately.  */
222 tree
223 get_symbol_constant_value (tree sym)
225 tree val = ctor_for_folding (sym);
226 if (val != error_mark_node)
228 if (val)
/* Unshare before canonicalizing: canonicalization may modify the tree
   in place (e.g. replacing an ADDR_EXPR operand).  */
230 val = canonicalize_constructor_val (unshare_expr (val), sym);
231 if (val && is_gimple_min_invariant (val))
232 return val;
233 else
234 return NULL_TREE;
236 /* Variables declared 'const' without an initializer
237 have zero as the initializer if they may not be
238 overridden at link or run time. */
239 if (!val
240 && is_gimple_reg_type (TREE_TYPE (sym)))
241 return build_zero_cst (TREE_TYPE (sym));
244 return NULL_TREE;
249 /* Subroutine of fold_stmt. We perform several simplifications of the
250 memory reference tree EXPR and make sure to re-gimplify them properly
251 after propagation of constant addresses. IS_LHS is true if the
252 reference is supposed to be an lvalue. */
254 static tree
255 maybe_fold_reference (tree expr, bool is_lhs)
257 tree result;
259 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
260 || TREE_CODE (expr) == REALPART_EXPR
261 || TREE_CODE (expr) == IMAGPART_EXPR)
262 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
263 return fold_unary_loc (EXPR_LOCATION (expr),
264 TREE_CODE (expr),
265 TREE_TYPE (expr),
266 TREE_OPERAND (expr, 0));
267 else if (TREE_CODE (expr) == BIT_FIELD_REF
268 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
269 return fold_ternary_loc (EXPR_LOCATION (expr),
270 TREE_CODE (expr),
271 TREE_TYPE (expr),
272 TREE_OPERAND (expr, 0),
273 TREE_OPERAND (expr, 1),
274 TREE_OPERAND (expr, 2));
276 if (!is_lhs
277 && (result = fold_const_aggregate_ref (expr))
278 && is_gimple_min_invariant (result))
279 return result;
281 return NULL_TREE;
285 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
286 replacement rhs for the statement or NULL_TREE if no simplification
287 could be made. It is assumed that the operands have been previously
288 folded. */
290 static tree
291 fold_gimple_assign (gimple_stmt_iterator *si)
293 gimple *stmt = gsi_stmt (*si);
294 enum tree_code subcode = gimple_assign_rhs_code (stmt);
295 location_t loc = gimple_location (stmt);
297 tree result = NULL_TREE;
299 switch (get_gimple_rhs_class (subcode))
301 case GIMPLE_SINGLE_RHS:
303 tree rhs = gimple_assign_rhs1 (stmt);
/* Clobbers mark end-of-life of a variable; nothing to fold.  */
305 if (TREE_CLOBBER_P (rhs))
306 return NULL_TREE;
308 if (REFERENCE_CLASS_P (rhs))
309 return maybe_fold_reference (rhs, false);
/* Try to devirtualize OBJ_TYPE_REF: if the polymorphic call target set
   is final and has at most one element we can resolve the address.  */
311 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
313 tree val = OBJ_TYPE_REF_EXPR (rhs);
314 if (is_gimple_min_invariant (val))
315 return val;
316 else if (flag_devirtualize && virtual_method_call_p (rhs))
318 bool final;
319 vec <cgraph_node *>targets
320 = possible_polymorphic_call_targets (rhs, stmt, &final);
321 if (final && targets.length () <= 1 && dbg_cnt (devirt))
323 if (dump_enabled_p ())
325 location_t loc = gimple_location_safe (stmt);
326 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
327 "resolving virtual function address "
328 "reference to function %s\n",
329 targets.length () == 1
330 ? targets[0]->name ()
331 : "NULL");
333 if (targets.length () == 1)
335 val = fold_convert (TREE_TYPE (val),
336 build_fold_addr_expr_loc
337 (loc, targets[0]->decl));
338 STRIP_USELESS_TYPE_CONVERSION (val);
340 else
341 /* We can not use __builtin_unreachable here because it
342 can not have address taken. */
343 val = build_int_cst (TREE_TYPE (val), 0);
344 return val;
/* Simplify &REF: prefer the plain pointer when REF folds to *ptr + 0.  */
349 else if (TREE_CODE (rhs) == ADDR_EXPR)
351 tree ref = TREE_OPERAND (rhs, 0);
352 tree tem = maybe_fold_reference (ref, true);
353 if (tem
354 && TREE_CODE (tem) == MEM_REF
355 && integer_zerop (TREE_OPERAND (tem, 1)))
356 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
357 else if (tem)
358 result = fold_convert (TREE_TYPE (rhs),
359 build_fold_addr_expr_loc (loc, tem));
360 else if (TREE_CODE (ref) == MEM_REF
361 && integer_zerop (TREE_OPERAND (ref, 1)))
362 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
364 if (result)
366 /* Strip away useless type conversions. Both the
367 NON_LVALUE_EXPR that may have been added by fold, and
368 "useless" type conversions that might now be apparent
369 due to propagation. */
370 STRIP_USELESS_TYPE_CONVERSION (result);
372 if (result != rhs && valid_gimple_rhs_p (result))
373 return result;
377 else if (TREE_CODE (rhs) == CONSTRUCTOR
378 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
380 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
381 unsigned i;
382 tree val;
384 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
385 if (! CONSTANT_CLASS_P (val))
386 return NULL_TREE;
388 return build_vector_from_ctor (TREE_TYPE (rhs),
389 CONSTRUCTOR_ELTS (rhs));
390 else if (DECL_P (rhs))
393 return get_symbol_constant_value (rhs);
395 break;
/* Unary and binary RHS folding is handled elsewhere (match-and-simplify);
   nothing to do here.  */
397 case GIMPLE_UNARY_RHS:
398 break;
400 case GIMPLE_BINARY_RHS:
401 break;
403 case GIMPLE_TERNARY_RHS:
404 result = fold_ternary_loc (loc, subcode,
405 TREE_TYPE (gimple_assign_lhs (stmt)),
406 gimple_assign_rhs1 (stmt),
407 gimple_assign_rhs2 (stmt),
408 gimple_assign_rhs3 (stmt));
410 if (result)
412 STRIP_USELESS_TYPE_CONVERSION (result);
413 if (valid_gimple_rhs_p (result))
414 return result;
416 break;
418 case GIMPLE_INVALID_RHS:
419 gcc_unreachable ();
422 return NULL_TREE;
426 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
427 adjusting the replacement stmts location and virtual operands.
428 If the statement has a lhs the last stmt in the sequence is expected
429 to assign to that lhs.
   The replacement keeps the original statement's incoming VUSE and its
   outgoing VDEF at the sequence boundaries, inventing fresh virtual SSA
   names only for stores in between.  */
431 static void
432 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
434 gimple *stmt = gsi_stmt (*si_p);
436 if (gimple_has_location (stmt))
437 annotate_all_with_location (stmts, gimple_location (stmt));
439 /* First iterate over the replacement statements backward, assigning
440 virtual operands to their defining statements. */
441 gimple *laststore = NULL;
442 for (gimple_stmt_iterator i = gsi_last (stmts);
443 !gsi_end_p (i); gsi_prev (&i))
445 gimple *new_stmt = gsi_stmt (i);
/* A statement needs a VDEF if it is a store (single assign to memory)
   or a call that may clobber memory.  */
446 if ((gimple_assign_single_p (new_stmt)
447 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
448 || (is_gimple_call (new_stmt)
449 && (gimple_call_flags (new_stmt)
450 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
452 tree vdef;
/* The last store in the sequence (seen first, walking backward)
   inherits the original statement's VDEF so downstream uses stay
   valid; earlier stores get fresh virtual names.  */
453 if (!laststore)
454 vdef = gimple_vdef (stmt);
455 else
456 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
457 gimple_set_vdef (new_stmt, vdef);
458 if (vdef && TREE_CODE (vdef) == SSA_NAME)
459 SSA_NAME_DEF_STMT (vdef) = new_stmt;
460 laststore = new_stmt;
464 /* Second iterate over the statements forward, assigning virtual
465 operands to their uses. */
466 tree reaching_vuse = gimple_vuse (stmt);
467 for (gimple_stmt_iterator i = gsi_start (stmts);
468 !gsi_end_p (i); gsi_next (&i))
470 gimple *new_stmt = gsi_stmt (i);
471 /* If the new statement possibly has a VUSE, update it with exact SSA
472 name we know will reach this one. */
473 if (gimple_has_mem_ops (new_stmt))
474 gimple_set_vuse (new_stmt, reaching_vuse);
475 gimple_set_modified (new_stmt, true);
476 if (gimple_vdef (new_stmt))
477 reaching_vuse = gimple_vdef (new_stmt);
480 /* If the new sequence does not do a store release the virtual
481 definition of the original statement. */
482 if (reaching_vuse
483 && reaching_vuse == gimple_vuse (stmt))
485 tree vdef = gimple_vdef (stmt);
486 if (vdef
487 && TREE_CODE (vdef) == SSA_NAME)
489 unlink_stmt_vdef (stmt);
490 release_ssa_name (vdef);
494 /* Finally replace the original statement with the sequence. */
495 gsi_replace_with_seq (si_p, stmts, false);
498 /* Convert EXPR into a GIMPLE value suitable for substitution on the
499 RHS of an assignment. Insert the necessary statements before
500 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
501 is replaced. If the call is expected to produces a result, then it
502 is replaced by an assignment of the new RHS to the result variable.
503 If the result is to be ignored, then the call is replaced by a
504 GIMPLE_NOP. A proper VDEF chain is retained by making the first
505 VUSE and the last VDEF of the whole sequence be the same as the replaced
506 statement and using new SSA names for stores in between. */
508 void
509 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
511 tree lhs;
512 gimple *stmt, *new_stmt;
513 gimple_stmt_iterator i;
514 gimple_seq stmts = NULL;
516 stmt = gsi_stmt (*si_p);
518 gcc_assert (is_gimple_call (stmt));
520 push_gimplify_context (gimple_in_ssa_p (cfun));
522 lhs = gimple_call_lhs (stmt);
523 if (lhs == NULL_TREE)
525 gimplify_and_add (expr, &stmts);
526 /* We can end up with folding a memcpy of an empty class assignment
527 which gets optimized away by C++ gimplification. */
528 if (gimple_seq_empty_p (stmts))
/* Nothing left to insert: drop the call entirely (releasing its
   virtual operands in SSA form) and bail out early.  */
530 pop_gimplify_context (NULL);
531 if (gimple_in_ssa_p (cfun))
533 unlink_stmt_vdef (stmt);
534 release_defs (stmt);
536 gsi_replace (si_p, gimple_build_nop (), false);
537 return;
540 else
/* The call has a result: gimplify EXPR into a temporary and append
   an assignment of that temporary to the original LHS.  */
542 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
543 new_stmt = gimple_build_assign (lhs, tmp);
544 i = gsi_last (stmts);
545 gsi_insert_after_without_update (&i, new_stmt,
546 GSI_CONTINUE_LINKING);
549 pop_gimplify_context (NULL);
/* Splice the sequence in, rethreading virtual operands (see
   gsi_replace_with_seq_vops above).  */
551 gsi_replace_with_seq_vops (si_p, stmts);
555 /* Replace the call at *GSI with the gimple value VAL. */
557 static void
558 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
560 gimple *stmt = gsi_stmt (*gsi);
561 tree lhs = gimple_call_lhs (stmt);
562 gimple *repl;
563 if (lhs)
565 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
566 val = fold_convert (TREE_TYPE (lhs), val);
567 repl = gimple_build_assign (lhs, val);
569 else
570 repl = gimple_build_nop ();
571 tree vdef = gimple_vdef (stmt);
572 if (vdef && TREE_CODE (vdef) == SSA_NAME)
574 unlink_stmt_vdef (stmt);
575 release_ssa_name (vdef);
577 gsi_replace (gsi, repl, false);
580 /* Replace the call at *GSI with the new call REPL and fold that
581 again. */
583 static void
584 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
586 gimple *stmt = gsi_stmt (*gsi);
587 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
588 gimple_set_location (repl, gimple_location (stmt));
589 if (gimple_vdef (stmt)
590 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
592 gimple_set_vdef (repl, gimple_vdef (stmt));
593 gimple_set_vuse (repl, gimple_vuse (stmt));
594 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
596 gsi_replace (gsi, repl, false);
597 fold_stmt (gsi);
600 /* Return true if VAR is a VAR_DECL or a component thereof. */
602 static bool
603 var_decl_component_p (tree var)
605 tree inner = var;
606 while (handled_component_p (inner))
607 inner = TREE_OPERAND (inner, 0);
608 return SSA_VAR_P (inner);
611 /* Fold function call to builtin mem{{,p}cpy,move}. Return
612 false if no simplification can be made.
613 If ENDP is 0, return DEST (like memcpy).
614 If ENDP is 1, return DEST+LEN (like mempcpy).
615 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
616 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
617 (memmove). */
619 static bool
620 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
621 tree dest, tree src, int endp)
623 gimple *stmt = gsi_stmt (*gsi);
624 tree lhs = gimple_call_lhs (stmt);
625 tree len = gimple_call_arg (stmt, 2);
626 tree destvar, srcvar;
627 location_t loc = gimple_location (stmt);
629 /* If the LEN parameter is zero, return DEST. */
630 if (integer_zerop (len))
632 gimple *repl;
633 if (gimple_call_lhs (stmt))
634 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
635 else
636 repl = gimple_build_nop ();
637 tree vdef = gimple_vdef (stmt);
638 if (vdef && TREE_CODE (vdef) == SSA_NAME)
640 unlink_stmt_vdef (stmt);
641 release_ssa_name (vdef);
643 gsi_replace (gsi, repl, false);
644 return true;
647 /* If SRC and DEST are the same (and not volatile), return
648 DEST{,+LEN,+LEN-1}. */
649 if (operand_equal_p (src, dest, 0))
651 unlink_stmt_vdef (stmt);
652 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
653 release_ssa_name (gimple_vdef (stmt));
654 if (!lhs)
656 gsi_replace (gsi, gimple_build_nop (), false);
657 return true;
659 goto done;
661 else
663 tree srctype, desttype;
664 unsigned int src_align, dest_align;
665 tree off0;
667 /* Build accesses at offset zero with a ref-all character type. */
668 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
669 ptr_mode, true), 0);
671 /* If we can perform the copy efficiently with first doing all loads
672 and then all stores inline it that way. Currently efficiently
673 means that we can load all the memory into a single integer
674 register which is what MOVE_MAX gives us. */
675 src_align = get_pointer_alignment (src);
676 dest_align = get_pointer_alignment (dest);
677 if (tree_fits_uhwi_p (len)
678 && compare_tree_int (len, MOVE_MAX) <= 0
679 /* ??? Don't transform copies from strings with known length this
680 confuses the tree-ssa-strlen.c. This doesn't handle
681 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
682 reason. */
683 && !c_strlen (src, 2))
685 unsigned ilen = tree_to_uhwi (len);
/* Only power-of-two sizes can become a single integer-mode move.  */
686 if (exact_log2 (ilen) != -1)
688 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
689 if (type
690 && TYPE_MODE (type) != BLKmode
691 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
692 == ilen * 8)
693 /* If the destination pointer is not aligned we must be able
694 to emit an unaligned store. */
695 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
696 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
697 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
698 != CODE_FOR_nothing)))
700 tree srctype = type;
701 tree desttype = type;
702 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
703 srctype = build_aligned_type (type, src_align);
704 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
705 tree tem = fold_const_aggregate_ref (srcmem);
706 if (tem)
707 srcmem = tem;
/* An under-aligned source load needs either cheap unaligned access
   or a movmisalign pattern; otherwise give up on this path.  */
708 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
709 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
710 src_align)
711 && (optab_handler (movmisalign_optab,
712 TYPE_MODE (type))
713 == CODE_FOR_nothing))
714 srcmem = NULL_TREE;
715 if (srcmem)
717 gimple *new_stmt;
718 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
720 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
721 if (gimple_in_ssa_p (cfun))
722 srcmem = make_ssa_name (TREE_TYPE (srcmem),
723 new_stmt);
724 else
725 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
726 gimple_assign_set_lhs (new_stmt, srcmem);
727 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
728 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
730 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
731 desttype = build_aligned_type (type, dest_align);
732 new_stmt
733 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
734 dest, off0),
735 srcmem);
736 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
737 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
738 if (gimple_vdef (new_stmt)
739 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
740 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
741 if (!lhs)
743 gsi_replace (gsi, new_stmt, false);
744 return true;
746 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
747 goto done;
/* ENDP == 3 is memmove: try to prove the regions cannot overlap and
   then degrade to the cheaper memcpy.  */
753 if (endp == 3)
755 /* Both DEST and SRC must be pointer types.
756 ??? This is what old code did. Is the testing for pointer types
757 really mandatory?
759 If either SRC is readonly or length is 1, we can use memcpy. */
760 if (!dest_align || !src_align)
761 return false;
762 if (readonly_data_expr (src)
763 || (tree_fits_uhwi_p (len)
764 && (MIN (src_align, dest_align) / BITS_PER_UNIT
765 >= tree_to_uhwi (len))))
767 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
768 if (!fn)
769 return false;
770 gimple_call_set_fndecl (stmt, fn);
771 gimple_call_set_arg (stmt, 0, dest);
772 gimple_call_set_arg (stmt, 1, src);
773 fold_stmt (gsi);
774 return true;
777 /* If *src and *dest can't overlap, optimize into memcpy as well. */
778 if (TREE_CODE (src) == ADDR_EXPR
779 && TREE_CODE (dest) == ADDR_EXPR)
781 tree src_base, dest_base, fn;
782 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
783 HOST_WIDE_INT size = -1;
784 HOST_WIDE_INT maxsize = -1;
786 srcvar = TREE_OPERAND (src, 0);
787 src_base = get_ref_base_and_extent (srcvar, &src_offset,
788 &size, &maxsize);
789 destvar = TREE_OPERAND (dest, 0);
790 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
791 &size, &maxsize);
792 if (tree_fits_uhwi_p (len))
793 maxsize = tree_to_uhwi (len);
794 else
795 maxsize = -1;
796 src_offset /= BITS_PER_UNIT;
797 dest_offset /= BITS_PER_UNIT;
798 if (SSA_VAR_P (src_base)
799 && SSA_VAR_P (dest_base))
801 if (operand_equal_p (src_base, dest_base, 0)
802 && ranges_overlap_p (src_offset, maxsize,
803 dest_offset, maxsize))
804 return false;
806 else if (TREE_CODE (src_base) == MEM_REF
807 && TREE_CODE (dest_base) == MEM_REF)
/* Same underlying pointer: compare the combined constant offsets,
   guarding against HOST_WIDE_INT overflow.  */
809 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
810 TREE_OPERAND (dest_base, 0), 0))
811 return false;
812 offset_int off = mem_ref_offset (src_base) + src_offset;
813 if (!wi::fits_shwi_p (off))
814 return false;
815 src_offset = off.to_shwi ();
817 off = mem_ref_offset (dest_base) + dest_offset;
818 if (!wi::fits_shwi_p (off))
819 return false;
820 dest_offset = off.to_shwi ();
821 if (ranges_overlap_p (src_offset, maxsize,
822 dest_offset, maxsize))
823 return false;
825 else
826 return false;
828 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
829 if (!fn)
830 return false;
831 gimple_call_set_fndecl (stmt, fn);
832 gimple_call_set_arg (stmt, 0, dest);
833 gimple_call_set_arg (stmt, 1, src);
834 fold_stmt (gsi);
835 return true;
838 /* If the destination and source do not alias optimize into
839 memcpy as well. */
840 if ((is_gimple_min_invariant (dest)
841 || TREE_CODE (dest) == SSA_NAME)
842 && (is_gimple_min_invariant (src)
843 || TREE_CODE (src) == SSA_NAME))
845 ao_ref destr, srcr;
846 ao_ref_init_from_ptr_and_size (&destr, dest, len);
847 ao_ref_init_from_ptr_and_size (&srcr, src, len);
848 if (!refs_may_alias_p_1 (&destr, &srcr, false))
850 tree fn;
851 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
852 if (!fn)
853 return false;
854 gimple_call_set_fndecl (stmt, fn);
855 gimple_call_set_arg (stmt, 0, dest);
856 gimple_call_set_arg (stmt, 1, src);
857 fold_stmt (gsi);
858 return true;
862 return false;
/* From here on: try to turn the copy into a single aggregate
   assignment of a type covering exactly LEN bytes.  */
865 if (!tree_fits_shwi_p (len))
866 return false;
867 /* FIXME:
868 This logic lose for arguments like (type *)malloc (sizeof (type)),
869 since we strip the casts of up to VOID return value from malloc.
870 Perhaps we ought to inherit type from non-VOID argument here? */
871 STRIP_NOPS (src);
872 STRIP_NOPS (dest);
873 if (!POINTER_TYPE_P (TREE_TYPE (src))
874 || !POINTER_TYPE_P (TREE_TYPE (dest)))
875 return false;
876 /* In the following try to find a type that is most natural to be
877 used for the memcpy source and destination and that allows
878 the most optimization when memcpy is turned into a plain assignment
879 using that type. In theory we could always use a char[len] type
880 but that only gains us that the destination and source possibly
881 no longer will have their address taken. */
882 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
883 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
885 tree tem = TREE_OPERAND (src, 0);
886 STRIP_NOPS (tem);
887 if (tem != TREE_OPERAND (src, 0))
888 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
890 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
892 tree tem = TREE_OPERAND (dest, 0);
893 STRIP_NOPS (tem);
894 if (tem != TREE_OPERAND (dest, 0))
895 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
/* Copying a whole-array-sized region: use the element type when the
   array size does not match LEN.  */
897 srctype = TREE_TYPE (TREE_TYPE (src));
898 if (TREE_CODE (srctype) == ARRAY_TYPE
899 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
901 srctype = TREE_TYPE (srctype);
902 STRIP_NOPS (src);
903 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
905 desttype = TREE_TYPE (TREE_TYPE (dest));
906 if (TREE_CODE (desttype) == ARRAY_TYPE
907 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
909 desttype = TREE_TYPE (desttype);
910 STRIP_NOPS (dest);
911 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
913 if (TREE_ADDRESSABLE (srctype)
914 || TREE_ADDRESSABLE (desttype))
915 return false;
917 /* Make sure we are not copying using a floating-point mode or
918 a type whose size possibly does not match its precision. */
919 if (FLOAT_MODE_P (TYPE_MODE (desttype))
920 || TREE_CODE (desttype) == BOOLEAN_TYPE
921 || TREE_CODE (desttype) == ENUMERAL_TYPE)
922 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
923 if (FLOAT_MODE_P (TYPE_MODE (srctype))
924 || TREE_CODE (srctype) == BOOLEAN_TYPE
925 || TREE_CODE (srctype) == ENUMERAL_TYPE)
926 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
927 if (!srctype)
928 srctype = desttype;
929 if (!desttype)
930 desttype = srctype;
931 if (!srctype)
932 return false;
934 src_align = get_pointer_alignment (src);
935 dest_align = get_pointer_alignment (dest);
936 if (dest_align < TYPE_ALIGN (desttype)
937 || src_align < TYPE_ALIGN (srctype))
938 return false;
/* Build the destination and source accesses; either may fall back to
   NULL_TREE when it is not a full-size component of a variable.  */
940 destvar = dest;
941 STRIP_NOPS (destvar);
942 if (TREE_CODE (destvar) == ADDR_EXPR
943 && var_decl_component_p (TREE_OPERAND (destvar, 0))
944 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
945 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
946 else
947 destvar = NULL_TREE;
949 srcvar = src;
950 STRIP_NOPS (srcvar);
951 if (TREE_CODE (srcvar) == ADDR_EXPR
952 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
953 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
955 if (!destvar
956 || src_align >= TYPE_ALIGN (desttype))
957 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
958 srcvar, off0);
959 else if (!STRICT_ALIGNMENT)
961 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
962 src_align);
963 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
965 else
966 srcvar = NULL_TREE;
968 else
969 srcvar = NULL_TREE;
971 if (srcvar == NULL_TREE && destvar == NULL_TREE)
972 return false;
/* When exactly one side could not be expressed directly, synthesize it
   as a MEM_REF of the other side's type.  */
974 if (srcvar == NULL_TREE)
976 STRIP_NOPS (src);
977 if (src_align >= TYPE_ALIGN (desttype))
978 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
979 else
981 if (STRICT_ALIGNMENT)
982 return false;
983 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
984 src_align);
985 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
988 else if (destvar == NULL_TREE)
990 STRIP_NOPS (dest);
991 if (dest_align >= TYPE_ALIGN (srctype))
992 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
993 else
995 if (STRICT_ALIGNMENT)
996 return false;
997 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
998 dest_align);
999 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
/* Emit the load (through a register when possible) and the store,
   threading the original statement's virtual operands.  */
1003 gimple *new_stmt;
1004 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1006 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1007 if (gimple_in_ssa_p (cfun))
1008 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1009 else
1010 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1011 gimple_assign_set_lhs (new_stmt, srcvar);
1012 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1013 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1015 new_stmt = gimple_build_assign (destvar, srcvar);
1016 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1017 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1018 if (gimple_vdef (new_stmt)
1019 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1020 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1021 if (!lhs)
1023 gsi_replace (gsi, new_stmt, false);
1024 return true;
1026 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
/* Reached only when the call's LHS is live (every !lhs path above
   replaced the call and returned already): compute the documented
   return value DEST{,+LEN,+LEN-1} according to ENDP and assign it.  */
1029 done:
1030 gimple_seq stmts = NULL;
1031 if (endp == 0 || endp == 3)
1032 len = NULL_TREE;
1033 else if (endp == 2)
1034 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1035 ssize_int (1));
1036 if (endp == 2 || endp == 1)
1038 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1039 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1040 TREE_TYPE (dest), dest, len);
1043 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1044 gimple *repl = gimple_build_assign (lhs, dest);
1045 gsi_replace (gsi, repl, false);
1046 return true;
1049 /* Fold function call to builtin memset or bzero at *GSI setting the
1050 memory of size LEN to VAL. Return whether a simplification was made. */
1052 static bool
1053 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1055 gimple *stmt = gsi_stmt (*gsi);
1056 tree etype;
1057 unsigned HOST_WIDE_INT length, cval;
1059 /* If the LEN parameter is zero, return DEST. */
1060 if (integer_zerop (len))
1062 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1063 return true;
1066 if (! tree_fits_uhwi_p (len))
1067 return false;
1069 if (TREE_CODE (c) != INTEGER_CST)
1070 return false;
1072 tree dest = gimple_call_arg (stmt, 0);
1073 tree var = dest;
1074 if (TREE_CODE (var) != ADDR_EXPR)
1075 return false;
1077 var = TREE_OPERAND (var, 0);
1078 if (TREE_THIS_VOLATILE (var))
1079 return false;
1081 etype = TREE_TYPE (var);
1082 if (TREE_CODE (etype) == ARRAY_TYPE)
1083 etype = TREE_TYPE (etype);
1085 if (!INTEGRAL_TYPE_P (etype)
1086 && !POINTER_TYPE_P (etype))
1087 return NULL_TREE;
1089 if (! var_decl_component_p (var))
1090 return NULL_TREE;
1092 length = tree_to_uhwi (len);
1093 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1094 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1095 return NULL_TREE;
1097 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1098 return NULL_TREE;
1100 if (integer_zerop (c))
1101 cval = 0;
1102 else
1104 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1105 return NULL_TREE;
1107 cval = TREE_INT_CST_LOW (c);
1108 cval &= 0xff;
1109 cval |= cval << 8;
1110 cval |= cval << 16;
1111 cval |= (cval << 31) << 1;
1114 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1115 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1116 gimple_set_vuse (store, gimple_vuse (stmt));
1117 tree vdef = gimple_vdef (stmt);
1118 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1120 gimple_set_vdef (store, gimple_vdef (stmt));
1121 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1123 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1124 if (gimple_call_lhs (stmt))
1126 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1127 gsi_replace (gsi, asgn, false);
1129 else
1131 gimple_stmt_iterator gsi2 = *gsi;
1132 gsi_prev (gsi);
1133 gsi_remove (&gsi2, true);
1136 return true;
1140 /* Return the string length, maximum string length or maximum value of
1141 ARG in LENGTH.
1142 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1143 is not NULL and, for TYPE == 0, its value is not equal to the length
1144 we determine or if we are unable to determine the length or value,
1145 return false. VISITED is a bitmap of visited variables.
1146 TYPE is 0 if string length should be returned, 1 for maximum string
1147 length and 2 for maximum value ARG can have. */
1149 static bool
1150 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1152 tree var, val;
1153 gimple *def_stmt;
1155 if (TREE_CODE (arg) != SSA_NAME)
1157 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1158 if (TREE_CODE (arg) == ADDR_EXPR
1159 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1160 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1162 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1163 if (TREE_CODE (aop0) == INDIRECT_REF
1164 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1165 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1166 length, visited, type);
1169 if (type == 2)
1171 val = arg;
1172 if (TREE_CODE (val) != INTEGER_CST
1173 || tree_int_cst_sgn (val) < 0)
1174 return false;
1176 else
1177 val = c_strlen (arg, 1);
1178 if (!val)
1179 return false;
1181 if (*length)
1183 if (type > 0)
1185 if (TREE_CODE (*length) != INTEGER_CST
1186 || TREE_CODE (val) != INTEGER_CST)
1187 return false;
1189 if (tree_int_cst_lt (*length, val))
1190 *length = val;
1191 return true;
1193 else if (simple_cst_equal (val, *length) != 1)
1194 return false;
1197 *length = val;
1198 return true;
1201 /* If ARG is registered for SSA update we cannot look at its defining
1202 statement. */
1203 if (name_registered_for_update_p (arg))
1204 return false;
1206 /* If we were already here, break the infinite cycle. */
1207 if (!*visited)
1208 *visited = BITMAP_ALLOC (NULL);
1209 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1210 return true;
1212 var = arg;
1213 def_stmt = SSA_NAME_DEF_STMT (var);
1215 switch (gimple_code (def_stmt))
1217 case GIMPLE_ASSIGN:
1218 /* The RHS of the statement defining VAR must either have a
1219 constant length or come from another SSA_NAME with a constant
1220 length. */
1221 if (gimple_assign_single_p (def_stmt)
1222 || gimple_assign_unary_nop_p (def_stmt))
1224 tree rhs = gimple_assign_rhs1 (def_stmt);
1225 return get_maxval_strlen (rhs, length, visited, type);
1227 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1229 tree op2 = gimple_assign_rhs2 (def_stmt);
1230 tree op3 = gimple_assign_rhs3 (def_stmt);
1231 return get_maxval_strlen (op2, length, visited, type)
1232 && get_maxval_strlen (op3, length, visited, type);
1234 return false;
1236 case GIMPLE_PHI:
1238 /* All the arguments of the PHI node must have the same constant
1239 length. */
1240 unsigned i;
1242 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1244 tree arg = gimple_phi_arg (def_stmt, i)->def;
1246 /* If this PHI has itself as an argument, we cannot
1247 determine the string length of this argument. However,
1248 if we can find a constant string length for the other
1249 PHI args then we can still be sure that this is a
1250 constant string length. So be optimistic and just
1251 continue with the next argument. */
1252 if (arg == gimple_phi_result (def_stmt))
1253 continue;
1255 if (!get_maxval_strlen (arg, length, visited, type))
1256 return false;
1259 return true;
1261 default:
1262 return false;
1266 tree
1267 get_maxval_strlen (tree arg, int type)
1269 bitmap visited = NULL;
1270 tree len = NULL_TREE;
1271 if (!get_maxval_strlen (arg, &len, &visited, type))
1272 len = NULL_TREE;
1273 if (visited)
1274 BITMAP_FREE (visited);
1276 return len;
1280 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1281 If LEN is not NULL, it represents the length of the string to be
1282 copied. Return NULL_TREE if no simplification can be made. */
1284 static bool
1285 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1286 tree dest, tree src)
1288 location_t loc = gimple_location (gsi_stmt (*gsi));
1289 tree fn;
1291 /* If SRC and DEST are the same (and not volatile), return DEST. */
1292 if (operand_equal_p (src, dest, 0))
1294 replace_call_with_value (gsi, dest);
1295 return true;
1298 if (optimize_function_for_size_p (cfun))
1299 return false;
1301 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1302 if (!fn)
1303 return false;
1305 tree len = get_maxval_strlen (src, 0);
1306 if (!len)
1307 return false;
1309 len = fold_convert_loc (loc, size_type_node, len);
1310 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1311 len = force_gimple_operand_gsi (gsi, len, true,
1312 NULL_TREE, true, GSI_SAME_STMT);
1313 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1314 replace_call_with_call_and_fold (gsi, repl);
1315 return true;
1318 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1319 If SLEN is not NULL, it represents the length of the source string.
1320 Return NULL_TREE if no simplification can be made. */
1322 static bool
1323 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1324 tree dest, tree src, tree len)
1326 location_t loc = gimple_location (gsi_stmt (*gsi));
1327 tree fn;
1329 /* If the LEN parameter is zero, return DEST. */
1330 if (integer_zerop (len))
1332 replace_call_with_value (gsi, dest);
1333 return true;
1336 /* We can't compare slen with len as constants below if len is not a
1337 constant. */
1338 if (TREE_CODE (len) != INTEGER_CST)
1339 return false;
1341 /* Now, we must be passed a constant src ptr parameter. */
1342 tree slen = get_maxval_strlen (src, 0);
1343 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1344 return false;
1346 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1348 /* We do not support simplification of this case, though we do
1349 support it when expanding trees into RTL. */
1350 /* FIXME: generate a call to __builtin_memset. */
1351 if (tree_int_cst_lt (slen, len))
1352 return false;
1354 /* OK transform into builtin memcpy. */
1355 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1356 if (!fn)
1357 return false;
1359 len = fold_convert_loc (loc, size_type_node, len);
1360 len = force_gimple_operand_gsi (gsi, len, true,
1361 NULL_TREE, true, GSI_SAME_STMT);
1362 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1363 replace_call_with_call_and_fold (gsi, repl);
1364 return true;
1367 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1368 to the call.
1370 Return NULL_TREE if no simplification was possible, otherwise return the
1371 simplified form of the call as a tree.
1373 The simplified form may be a constant or other expression which
1374 computes the same value, but in a more efficient manner (including
1375 calls to other builtin functions).
1377 The call may contain arguments which need to be evaluated, but
1378 which are not useful to determine the result of the call. In
1379 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1380 COMPOUND_EXPR will be an argument which must be evaluated.
1381 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1382 COMPOUND_EXPR in the chain will contain the tree for the simplified
1383 form of the builtin function call. */
1385 static bool
1386 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1388 gimple *stmt = gsi_stmt (*gsi);
1389 location_t loc = gimple_location (stmt);
1391 const char *p = c_getstr (src);
1393 /* If the string length is zero, return the dst parameter. */
1394 if (p && *p == '\0')
1396 replace_call_with_value (gsi, dst);
1397 return true;
1400 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1401 return false;
1403 /* See if we can store by pieces into (dst + strlen(dst)). */
1404 tree newdst;
1405 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1406 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1408 if (!strlen_fn || !memcpy_fn)
1409 return false;
1411 /* If the length of the source string isn't computable don't
1412 split strcat into strlen and memcpy. */
1413 tree len = get_maxval_strlen (src, 0);
1414 if (! len)
1415 return false;
1417 /* Create strlen (dst). */
1418 gimple_seq stmts = NULL, stmts2;
1419 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1420 gimple_set_location (repl, loc);
1421 if (gimple_in_ssa_p (cfun))
1422 newdst = make_ssa_name (size_type_node);
1423 else
1424 newdst = create_tmp_reg (size_type_node);
1425 gimple_call_set_lhs (repl, newdst);
1426 gimple_seq_add_stmt_without_update (&stmts, repl);
1428 /* Create (dst p+ strlen (dst)). */
1429 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1430 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1431 gimple_seq_add_seq_without_update (&stmts, stmts2);
1433 len = fold_convert_loc (loc, size_type_node, len);
1434 len = size_binop_loc (loc, PLUS_EXPR, len,
1435 build_int_cst (size_type_node, 1));
1436 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1437 gimple_seq_add_seq_without_update (&stmts, stmts2);
1439 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1440 gimple_seq_add_stmt_without_update (&stmts, repl);
1441 if (gimple_call_lhs (stmt))
1443 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1444 gimple_seq_add_stmt_without_update (&stmts, repl);
1445 gsi_replace_with_seq_vops (gsi, stmts);
1446 /* gsi now points at the assignment to the lhs, get a
1447 stmt iterator to the memcpy call.
1448 ??? We can't use gsi_for_stmt as that doesn't work when the
1449 CFG isn't built yet. */
1450 gimple_stmt_iterator gsi2 = *gsi;
1451 gsi_prev (&gsi2);
1452 fold_stmt (&gsi2);
1454 else
1456 gsi_replace_with_seq_vops (gsi, stmts);
1457 fold_stmt (gsi);
1459 return true;
1462 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1463 are the arguments to the call. */
1465 static bool
1466 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1468 gimple *stmt = gsi_stmt (*gsi);
1469 tree dest = gimple_call_arg (stmt, 0);
1470 tree src = gimple_call_arg (stmt, 1);
1471 tree size = gimple_call_arg (stmt, 2);
1472 tree fn;
1473 const char *p;
1476 p = c_getstr (src);
1477 /* If the SRC parameter is "", return DEST. */
1478 if (p && *p == '\0')
1480 replace_call_with_value (gsi, dest);
1481 return true;
1484 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1485 return false;
1487 /* If __builtin_strcat_chk is used, assume strcat is available. */
1488 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1489 if (!fn)
1490 return false;
1492 gimple *repl = gimple_build_call (fn, 2, dest, src);
1493 replace_call_with_call_and_fold (gsi, repl);
1494 return true;
1497 /* Simplify a call to the strncat builtin. */
1499 static bool
1500 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1502 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1503 tree dst = gimple_call_arg (stmt, 0);
1504 tree src = gimple_call_arg (stmt, 1);
1505 tree len = gimple_call_arg (stmt, 2);
1507 const char *p = c_getstr (src);
1509 /* If the requested length is zero, or the src parameter string
1510 length is zero, return the dst parameter. */
1511 if (integer_zerop (len) || (p && *p == '\0'))
1513 replace_call_with_value (gsi, dst);
1514 return true;
1517 /* If the requested len is greater than or equal to the string
1518 length, call strcat. */
1519 if (TREE_CODE (len) == INTEGER_CST && p
1520 && compare_tree_int (len, strlen (p)) >= 0)
1522 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1524 /* If the replacement _DECL isn't initialized, don't do the
1525 transformation. */
1526 if (!fn)
1527 return false;
1529 gcall *repl = gimple_build_call (fn, 2, dst, src);
1530 replace_call_with_call_and_fold (gsi, repl);
1531 return true;
1534 return false;
1537 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1538 LEN, and SIZE. */
1540 static bool
1541 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1543 gimple *stmt = gsi_stmt (*gsi);
1544 tree dest = gimple_call_arg (stmt, 0);
1545 tree src = gimple_call_arg (stmt, 1);
1546 tree len = gimple_call_arg (stmt, 2);
1547 tree size = gimple_call_arg (stmt, 3);
1548 tree fn;
1549 const char *p;
1551 p = c_getstr (src);
1552 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1553 if ((p && *p == '\0')
1554 || integer_zerop (len))
1556 replace_call_with_value (gsi, dest);
1557 return true;
1560 if (! tree_fits_uhwi_p (size))
1561 return false;
1563 if (! integer_all_onesp (size))
1565 tree src_len = c_strlen (src, 1);
1566 if (src_len
1567 && tree_fits_uhwi_p (src_len)
1568 && tree_fits_uhwi_p (len)
1569 && ! tree_int_cst_lt (len, src_len))
1571 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1572 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1573 if (!fn)
1574 return false;
1576 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1577 replace_call_with_call_and_fold (gsi, repl);
1578 return true;
1580 return false;
1583 /* If __builtin_strncat_chk is used, assume strncat is available. */
1584 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1585 if (!fn)
1586 return false;
1588 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1589 replace_call_with_call_and_fold (gsi, repl);
1590 return true;
1593 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1594 to the call. IGNORE is true if the value returned
1595 by the builtin will be ignored. UNLOCKED is true is true if this
1596 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1597 the known length of the string. Return NULL_TREE if no simplification
1598 was possible. */
1600 static bool
1601 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1602 tree arg0, tree arg1,
1603 bool unlocked)
1605 gimple *stmt = gsi_stmt (*gsi);
1607 /* If we're using an unlocked function, assume the other unlocked
1608 functions exist explicitly. */
1609 tree const fn_fputc = (unlocked
1610 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1611 : builtin_decl_implicit (BUILT_IN_FPUTC));
1612 tree const fn_fwrite = (unlocked
1613 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1614 : builtin_decl_implicit (BUILT_IN_FWRITE));
1616 /* If the return value is used, don't do the transformation. */
1617 if (gimple_call_lhs (stmt))
1618 return false;
1620 /* Get the length of the string passed to fputs. If the length
1621 can't be determined, punt. */
1622 tree len = get_maxval_strlen (arg0, 0);
1623 if (!len
1624 || TREE_CODE (len) != INTEGER_CST)
1625 return false;
1627 switch (compare_tree_int (len, 1))
1629 case -1: /* length is 0, delete the call entirely . */
1630 replace_call_with_value (gsi, integer_zero_node);
1631 return true;
1633 case 0: /* length is 1, call fputc. */
1635 const char *p = c_getstr (arg0);
1636 if (p != NULL)
1638 if (!fn_fputc)
1639 return false;
1641 gimple *repl = gimple_build_call (fn_fputc, 2,
1642 build_int_cst
1643 (integer_type_node, p[0]), arg1);
1644 replace_call_with_call_and_fold (gsi, repl);
1645 return true;
1648 /* FALLTHROUGH */
1649 case 1: /* length is greater than 1, call fwrite. */
1651 /* If optimizing for size keep fputs. */
1652 if (optimize_function_for_size_p (cfun))
1653 return false;
1654 /* New argument list transforming fputs(string, stream) to
1655 fwrite(string, 1, len, stream). */
1656 if (!fn_fwrite)
1657 return false;
1659 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
1660 size_one_node, len, arg1);
1661 replace_call_with_call_and_fold (gsi, repl);
1662 return true;
1664 default:
1665 gcc_unreachable ();
1667 return false;
1670 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1671 DEST, SRC, LEN, and SIZE are the arguments to the call.
1672 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1673 code of the builtin. If MAXLEN is not NULL, it is maximum length
1674 passed as third argument. */
1676 static bool
1677 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1678 tree dest, tree src, tree len, tree size,
1679 enum built_in_function fcode)
1681 gimple *stmt = gsi_stmt (*gsi);
1682 location_t loc = gimple_location (stmt);
1683 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1684 tree fn;
1686 /* If SRC and DEST are the same (and not volatile), return DEST
1687 (resp. DEST+LEN for __mempcpy_chk). */
1688 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1690 if (fcode != BUILT_IN_MEMPCPY_CHK)
1692 replace_call_with_value (gsi, dest);
1693 return true;
1695 else
1697 gimple_seq stmts = NULL;
1698 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1699 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR, dest, len);
1700 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1701 replace_call_with_value (gsi, temp);
1702 return true;
1706 if (! tree_fits_uhwi_p (size))
1707 return false;
1709 tree maxlen = get_maxval_strlen (len, 2);
1710 if (! integer_all_onesp (size))
1712 if (! tree_fits_uhwi_p (len))
1714 /* If LEN is not constant, try MAXLEN too.
1715 For MAXLEN only allow optimizing into non-_ocs function
1716 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1717 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1719 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1721 /* (void) __mempcpy_chk () can be optimized into
1722 (void) __memcpy_chk (). */
1723 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1724 if (!fn)
1725 return false;
1727 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1728 replace_call_with_call_and_fold (gsi, repl);
1729 return true;
1731 return false;
1734 else
1735 maxlen = len;
1737 if (tree_int_cst_lt (size, maxlen))
1738 return false;
1741 fn = NULL_TREE;
1742 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1743 mem{cpy,pcpy,move,set} is available. */
1744 switch (fcode)
1746 case BUILT_IN_MEMCPY_CHK:
1747 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1748 break;
1749 case BUILT_IN_MEMPCPY_CHK:
1750 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1751 break;
1752 case BUILT_IN_MEMMOVE_CHK:
1753 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1754 break;
1755 case BUILT_IN_MEMSET_CHK:
1756 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1757 break;
1758 default:
1759 break;
1762 if (!fn)
1763 return false;
1765 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1766 replace_call_with_call_and_fold (gsi, repl);
1767 return true;
1770 /* Fold a call to the __st[rp]cpy_chk builtin.
1771 DEST, SRC, and SIZE are the arguments to the call.
1772 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1773 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1774 strings passed as second argument. */
1776 static bool
1777 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1778 tree dest,
1779 tree src, tree size,
1780 enum built_in_function fcode)
1782 gimple *stmt = gsi_stmt (*gsi);
1783 location_t loc = gimple_location (stmt);
1784 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1785 tree len, fn;
1787 /* If SRC and DEST are the same (and not volatile), return DEST. */
1788 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1790 replace_call_with_value (gsi, dest);
1791 return true;
1794 if (! tree_fits_uhwi_p (size))
1795 return false;
1797 tree maxlen = get_maxval_strlen (src, 1);
1798 if (! integer_all_onesp (size))
1800 len = c_strlen (src, 1);
1801 if (! len || ! tree_fits_uhwi_p (len))
1803 /* If LEN is not constant, try MAXLEN too.
1804 For MAXLEN only allow optimizing into non-_ocs function
1805 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1806 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1808 if (fcode == BUILT_IN_STPCPY_CHK)
1810 if (! ignore)
1811 return false;
1813 /* If return value of __stpcpy_chk is ignored,
1814 optimize into __strcpy_chk. */
1815 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1816 if (!fn)
1817 return false;
1819 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1820 replace_call_with_call_and_fold (gsi, repl);
1821 return true;
1824 if (! len || TREE_SIDE_EFFECTS (len))
1825 return false;
1827 /* If c_strlen returned something, but not a constant,
1828 transform __strcpy_chk into __memcpy_chk. */
1829 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1830 if (!fn)
1831 return false;
1833 gimple_seq stmts = NULL;
1834 len = gimple_convert (&stmts, loc, size_type_node, len);
1835 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
1836 build_int_cst (size_type_node, 1));
1837 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1838 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1839 replace_call_with_call_and_fold (gsi, repl);
1840 return true;
1843 else
1844 maxlen = len;
1846 if (! tree_int_cst_lt (maxlen, size))
1847 return false;
1850 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1851 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1852 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1853 if (!fn)
1854 return false;
1856 gimple *repl = gimple_build_call (fn, 2, dest, src);
1857 replace_call_with_call_and_fold (gsi, repl);
1858 return true;
1861 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1862 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1863 length passed as third argument. IGNORE is true if return value can be
1864 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1866 static bool
1867 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1868 tree dest, tree src,
1869 tree len, tree size,
1870 enum built_in_function fcode)
1872 gimple *stmt = gsi_stmt (*gsi);
1873 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1874 tree fn;
1876 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1878 /* If return value of __stpncpy_chk is ignored,
1879 optimize into __strncpy_chk. */
1880 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1881 if (fn)
1883 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1884 replace_call_with_call_and_fold (gsi, repl);
1885 return true;
1889 if (! tree_fits_uhwi_p (size))
1890 return false;
1892 tree maxlen = get_maxval_strlen (len, 2);
1893 if (! integer_all_onesp (size))
1895 if (! tree_fits_uhwi_p (len))
1897 /* If LEN is not constant, try MAXLEN too.
1898 For MAXLEN only allow optimizing into non-_ocs function
1899 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1900 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1901 return false;
1903 else
1904 maxlen = len;
1906 if (tree_int_cst_lt (size, maxlen))
1907 return false;
1910 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1911 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
1912 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
1913 if (!fn)
1914 return false;
1916 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1917 replace_call_with_call_and_fold (gsi, repl);
1918 return true;
1921 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
1922 Return NULL_TREE if no simplification can be made. */
1924 static bool
1925 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
1927 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1928 location_t loc = gimple_location (stmt);
1929 tree dest = gimple_call_arg (stmt, 0);
1930 tree src = gimple_call_arg (stmt, 1);
1931 tree fn, len, lenp1;
1933 /* If the result is unused, replace stpcpy with strcpy. */
1934 if (gimple_call_lhs (stmt) == NULL_TREE)
1936 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
1937 if (!fn)
1938 return false;
1939 gimple_call_set_fndecl (stmt, fn);
1940 fold_stmt (gsi);
1941 return true;
1944 len = c_strlen (src, 1);
1945 if (!len
1946 || TREE_CODE (len) != INTEGER_CST)
1947 return false;
1949 if (optimize_function_for_size_p (cfun)
1950 /* If length is zero it's small enough. */
1951 && !integer_zerop (len))
1952 return false;
1954 /* If the source has a known length replace stpcpy with memcpy. */
1955 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1956 if (!fn)
1957 return false;
1959 gimple_seq stmts = NULL;
1960 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
1961 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
1962 tem, build_int_cst (size_type_node, 1));
1963 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1964 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
1965 gimple_set_vuse (repl, gimple_vuse (stmt));
1966 gimple_set_vdef (repl, gimple_vdef (stmt));
1967 if (gimple_vdef (repl)
1968 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
1969 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
1970 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
1971 /* Replace the result with dest + len. */
1972 stmts = NULL;
1973 tem = gimple_convert (&stmts, loc, sizetype, len);
1974 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1975 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
1976 POINTER_PLUS_EXPR, dest, tem);
1977 gsi_replace (gsi, ret, false);
1978 /* Finally fold the memcpy call. */
1979 gimple_stmt_iterator gsi2 = *gsi;
1980 gsi_prev (&gsi2);
1981 fold_stmt (&gsi2);
1982 return true;
1985 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
1986 NULL_TREE if a normal call should be emitted rather than expanding
1987 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
1988 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
1989 passed as second argument. */
1991 static bool
1992 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
1993 enum built_in_function fcode)
1995 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1996 tree dest, size, len, fn, fmt, flag;
1997 const char *fmt_str;
1999 /* Verify the required arguments in the original call. */
2000 if (gimple_call_num_args (stmt) < 5)
2001 return false;
2003 dest = gimple_call_arg (stmt, 0);
2004 len = gimple_call_arg (stmt, 1);
2005 flag = gimple_call_arg (stmt, 2);
2006 size = gimple_call_arg (stmt, 3);
2007 fmt = gimple_call_arg (stmt, 4);
2009 if (! tree_fits_uhwi_p (size))
2010 return false;
2012 if (! integer_all_onesp (size))
2014 tree maxlen = get_maxval_strlen (len, 2);
2015 if (! tree_fits_uhwi_p (len))
2017 /* If LEN is not constant, try MAXLEN too.
2018 For MAXLEN only allow optimizing into non-_ocs function
2019 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2020 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2021 return false;
2023 else
2024 maxlen = len;
2026 if (tree_int_cst_lt (size, maxlen))
2027 return false;
2030 if (!init_target_chars ())
2031 return false;
2033 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2034 or if format doesn't contain % chars or is "%s". */
2035 if (! integer_zerop (flag))
2037 fmt_str = c_getstr (fmt);
2038 if (fmt_str == NULL)
2039 return false;
2040 if (strchr (fmt_str, target_percent) != NULL
2041 && strcmp (fmt_str, target_percent_s))
2042 return false;
2045 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2046 available. */
2047 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2048 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2049 if (!fn)
2050 return false;
2052 /* Replace the called function and the first 5 argument by 3 retaining
2053 trailing varargs. */
2054 gimple_call_set_fndecl (stmt, fn);
2055 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2056 gimple_call_set_arg (stmt, 0, dest);
2057 gimple_call_set_arg (stmt, 1, len);
2058 gimple_call_set_arg (stmt, 2, fmt);
2059 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2060 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2061 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2062 fold_stmt (gsi);
2063 return true;
2066 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2067 Return NULL_TREE if a normal call should be emitted rather than
2068 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2069 or BUILT_IN_VSPRINTF_CHK. */
2071 static bool
2072 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2073 enum built_in_function fcode)
2075 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2076 tree dest, size, len, fn, fmt, flag;
2077 const char *fmt_str;
2078 unsigned nargs = gimple_call_num_args (stmt);
2080 /* Verify the required arguments in the original call. */
2081 if (nargs < 4)
2082 return false;
2083 dest = gimple_call_arg (stmt, 0);
2084 flag = gimple_call_arg (stmt, 1);
2085 size = gimple_call_arg (stmt, 2);
2086 fmt = gimple_call_arg (stmt, 3);
2088 if (! tree_fits_uhwi_p (size))
2089 return false;
2091 len = NULL_TREE;
2093 if (!init_target_chars ())
2094 return false;
2096 /* Check whether the format is a literal string constant. */
2097 fmt_str = c_getstr (fmt);
2098 if (fmt_str != NULL)
2100 /* If the format doesn't contain % args or %%, we know the size. */
2101 if (strchr (fmt_str, target_percent) == 0)
2103 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2104 len = build_int_cstu (size_type_node, strlen (fmt_str));
2106 /* If the format is "%s" and first ... argument is a string literal,
2107 we know the size too. */
2108 else if (fcode == BUILT_IN_SPRINTF_CHK
2109 && strcmp (fmt_str, target_percent_s) == 0)
2111 tree arg;
2113 if (nargs == 5)
2115 arg = gimple_call_arg (stmt, 4);
2116 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2118 len = c_strlen (arg, 1);
2119 if (! len || ! tree_fits_uhwi_p (len))
2120 len = NULL_TREE;
2126 if (! integer_all_onesp (size))
2128 if (! len || ! tree_int_cst_lt (len, size))
2129 return false;
2132 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2133 or if format doesn't contain % chars or is "%s". */
2134 if (! integer_zerop (flag))
2136 if (fmt_str == NULL)
2137 return false;
2138 if (strchr (fmt_str, target_percent) != NULL
2139 && strcmp (fmt_str, target_percent_s))
2140 return false;
2143 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2144 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2145 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2146 if (!fn)
2147 return false;
2149 /* Replace the called function and the first 4 argument by 2 retaining
2150 trailing varargs. */
2151 gimple_call_set_fndecl (stmt, fn);
2152 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2153 gimple_call_set_arg (stmt, 0, dest);
2154 gimple_call_set_arg (stmt, 1, fmt);
2155 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2156 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2157 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2158 fold_stmt (gsi);
2159 return true;
2162 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2163 ORIG may be null if this is a 2-argument call. We don't attempt to
2164 simplify calls with more than 3 arguments.
2166 Return NULL_TREE if no simplification was possible, otherwise return the
2167 simplified form of the call as a tree. If IGNORED is true, it means that
2168 the caller does not use the returned value of the function. */
2170 static bool
2171 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2173 gimple *stmt = gsi_stmt (*gsi);
2174 tree dest = gimple_call_arg (stmt, 0);
2175 tree fmt = gimple_call_arg (stmt, 1);
2176 tree orig = NULL_TREE;
2177 const char *fmt_str = NULL;
2179 /* Verify the required arguments in the original call. We deal with two
2180 types of sprintf() calls: 'sprintf (str, fmt)' and
2181 'sprintf (dest, "%s", orig)'. */
2182 if (gimple_call_num_args (stmt) > 3)
2183 return false;
2185 if (gimple_call_num_args (stmt) == 3)
2186 orig = gimple_call_arg (stmt, 2);
2188 /* Check whether the format is a literal string constant. */
2189 fmt_str = c_getstr (fmt);
2190 if (fmt_str == NULL)
2191 return false;
2193 if (!init_target_chars ())
2194 return false;
2196 /* If the format doesn't contain % args or %%, use strcpy. */
2197 if (strchr (fmt_str, target_percent) == NULL)
2199 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2201 if (!fn)
2202 return false;
2204 /* Don't optimize sprintf (buf, "abc", ptr++). */
2205 if (orig)
2206 return false;
2208 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2209 'format' is known to contain no % formats. */
2210 gimple_seq stmts = NULL;
2211 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2212 gimple_seq_add_stmt_without_update (&stmts, repl);
2213 if (gimple_call_lhs (stmt))
2215 repl = gimple_build_assign (gimple_call_lhs (stmt),
2216 build_int_cst (integer_type_node,
2217 strlen (fmt_str)));
2218 gimple_seq_add_stmt_without_update (&stmts, repl);
2219 gsi_replace_with_seq_vops (gsi, stmts);
2220 /* gsi now points at the assignment to the lhs, get a
2221 stmt iterator to the memcpy call.
2222 ??? We can't use gsi_for_stmt as that doesn't work when the
2223 CFG isn't built yet. */
2224 gimple_stmt_iterator gsi2 = *gsi;
2225 gsi_prev (&gsi2);
2226 fold_stmt (&gsi2);
2228 else
2230 gsi_replace_with_seq_vops (gsi, stmts);
2231 fold_stmt (gsi);
2233 return true;
2236 /* If the format is "%s", use strcpy if the result isn't used. */
2237 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2239 tree fn;
2240 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2242 if (!fn)
2243 return false;
2245 /* Don't crash on sprintf (str1, "%s"). */
2246 if (!orig)
2247 return false;
2249 tree orig_len = NULL_TREE;
2250 if (gimple_call_lhs (stmt))
2252 orig_len = get_maxval_strlen (orig, 0);
2253 if (!orig_len)
2254 return false;
2257 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2258 gimple_seq stmts = NULL;
2259 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2260 gimple_seq_add_stmt_without_update (&stmts, repl);
2261 if (gimple_call_lhs (stmt))
2263 if (!useless_type_conversion_p (integer_type_node,
2264 TREE_TYPE (orig_len)))
2265 orig_len = fold_convert (integer_type_node, orig_len);
2266 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2267 gimple_seq_add_stmt_without_update (&stmts, repl);
2268 gsi_replace_with_seq_vops (gsi, stmts);
2269 /* gsi now points at the assignment to the lhs, get a
2270 stmt iterator to the memcpy call.
2271 ??? We can't use gsi_for_stmt as that doesn't work when the
2272 CFG isn't built yet. */
2273 gimple_stmt_iterator gsi2 = *gsi;
2274 gsi_prev (&gsi2);
2275 fold_stmt (&gsi2);
2277 else
2279 gsi_replace_with_seq_vops (gsi, stmts);
2280 fold_stmt (gsi);
2282 return true;
2284 return false;
2287 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2288 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2289 attempt to simplify calls with more than 4 arguments.
2291 Return NULL_TREE if no simplification was possible, otherwise return the
2292 simplified form of the call as a tree. If IGNORED is true, it means that
2293 the caller does not use the returned value of the function. */
2295 static bool
2296 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2298 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2299 tree dest = gimple_call_arg (stmt, 0);
2300 tree destsize = gimple_call_arg (stmt, 1);
2301 tree fmt = gimple_call_arg (stmt, 2);
2302 tree orig = NULL_TREE;
2303 const char *fmt_str = NULL;
2305 if (gimple_call_num_args (stmt) > 4)
2306 return false;
2308 if (gimple_call_num_args (stmt) == 4)
2309 orig = gimple_call_arg (stmt, 3);
2311 if (!tree_fits_uhwi_p (destsize))
2312 return false;
2313 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2315 /* Check whether the format is a literal string constant. */
2316 fmt_str = c_getstr (fmt);
2317 if (fmt_str == NULL)
2318 return false;
2320 if (!init_target_chars ())
2321 return false;
2323 /* If the format doesn't contain % args or %%, use strcpy. */
2324 if (strchr (fmt_str, target_percent) == NULL)
2326 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2327 if (!fn)
2328 return false;
2330 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2331 if (orig)
2332 return false;
2334 /* We could expand this as
2335 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2336 or to
2337 memcpy (str, fmt_with_nul_at_cstm1, cst);
2338 but in the former case that might increase code size
2339 and in the latter case grow .rodata section too much.
2340 So punt for now. */
2341 size_t len = strlen (fmt_str);
2342 if (len >= destlen)
2343 return false;
2345 gimple_seq stmts = NULL;
2346 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2347 gimple_seq_add_stmt_without_update (&stmts, repl);
2348 if (gimple_call_lhs (stmt))
2350 repl = gimple_build_assign (gimple_call_lhs (stmt),
2351 build_int_cst (integer_type_node, len));
2352 gimple_seq_add_stmt_without_update (&stmts, repl);
2353 gsi_replace_with_seq_vops (gsi, stmts);
2354 /* gsi now points at the assignment to the lhs, get a
2355 stmt iterator to the memcpy call.
2356 ??? We can't use gsi_for_stmt as that doesn't work when the
2357 CFG isn't built yet. */
2358 gimple_stmt_iterator gsi2 = *gsi;
2359 gsi_prev (&gsi2);
2360 fold_stmt (&gsi2);
2362 else
2364 gsi_replace_with_seq_vops (gsi, stmts);
2365 fold_stmt (gsi);
2367 return true;
2370 /* If the format is "%s", use strcpy if the result isn't used. */
2371 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2373 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2374 if (!fn)
2375 return false;
2377 /* Don't crash on snprintf (str1, cst, "%s"). */
2378 if (!orig)
2379 return false;
2381 tree orig_len = get_maxval_strlen (orig, 0);
2382 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2383 return false;
2385 /* We could expand this as
2386 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2387 or to
2388 memcpy (str1, str2_with_nul_at_cstm1, cst);
2389 but in the former case that might increase code size
2390 and in the latter case grow .rodata section too much.
2391 So punt for now. */
2392 if (compare_tree_int (orig_len, destlen) >= 0)
2393 return false;
2395 /* Convert snprintf (str1, cst, "%s", str2) into
2396 strcpy (str1, str2) if strlen (str2) < cst. */
2397 gimple_seq stmts = NULL;
2398 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2399 gimple_seq_add_stmt_without_update (&stmts, repl);
2400 if (gimple_call_lhs (stmt))
2402 if (!useless_type_conversion_p (integer_type_node,
2403 TREE_TYPE (orig_len)))
2404 orig_len = fold_convert (integer_type_node, orig_len);
2405 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2406 gimple_seq_add_stmt_without_update (&stmts, repl);
2407 gsi_replace_with_seq_vops (gsi, stmts);
2408 /* gsi now points at the assignment to the lhs, get a
2409 stmt iterator to the memcpy call.
2410 ??? We can't use gsi_for_stmt as that doesn't work when the
2411 CFG isn't built yet. */
2412 gimple_stmt_iterator gsi2 = *gsi;
2413 gsi_prev (&gsi2);
2414 fold_stmt (&gsi2);
2416 else
2418 gsi_replace_with_seq_vops (gsi, stmts);
2419 fold_stmt (gsi);
2421 return true;
2423 return false;
2426 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2427 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2428 more than 3 arguments, and ARG may be null in the 2-argument case.
2430 Return NULL_TREE if no simplification was possible, otherwise return the
2431 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2432 code of the function to be simplified. */
2434 static bool
2435 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2436 tree fp, tree fmt, tree arg,
2437 enum built_in_function fcode)
2439 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2440 tree fn_fputc, fn_fputs;
2441 const char *fmt_str = NULL;
2443 /* If the return value is used, don't do the transformation. */
2444 if (gimple_call_lhs (stmt) != NULL_TREE)
2445 return false;
2447 /* Check whether the format is a literal string constant. */
2448 fmt_str = c_getstr (fmt);
2449 if (fmt_str == NULL)
2450 return false;
2452 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2454 /* If we're using an unlocked function, assume the other
2455 unlocked functions exist explicitly. */
2456 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2457 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2459 else
2461 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2462 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2465 if (!init_target_chars ())
2466 return false;
2468 /* If the format doesn't contain % args or %%, use strcpy. */
2469 if (strchr (fmt_str, target_percent) == NULL)
2471 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2472 && arg)
2473 return false;
2475 /* If the format specifier was "", fprintf does nothing. */
2476 if (fmt_str[0] == '\0')
2478 replace_call_with_value (gsi, NULL_TREE);
2479 return true;
2482 /* When "string" doesn't contain %, replace all cases of
2483 fprintf (fp, string) with fputs (string, fp). The fputs
2484 builtin will take care of special cases like length == 1. */
2485 if (fn_fputs)
2487 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2488 replace_call_with_call_and_fold (gsi, repl);
2489 return true;
2493 /* The other optimizations can be done only on the non-va_list variants. */
2494 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2495 return false;
2497 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2498 else if (strcmp (fmt_str, target_percent_s) == 0)
2500 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2501 return false;
2502 if (fn_fputs)
2504 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2505 replace_call_with_call_and_fold (gsi, repl);
2506 return true;
2510 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2511 else if (strcmp (fmt_str, target_percent_c) == 0)
2513 if (!arg
2514 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2515 return false;
2516 if (fn_fputc)
2518 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2519 replace_call_with_call_and_fold (gsi, repl);
2520 return true;
2524 return false;
2527 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2528 FMT and ARG are the arguments to the call; we don't fold cases with
2529 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2531 Return NULL_TREE if no simplification was possible, otherwise return the
2532 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2533 code of the function to be simplified. */
2535 static bool
2536 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2537 tree arg, enum built_in_function fcode)
2539 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2540 tree fn_putchar, fn_puts, newarg;
2541 const char *fmt_str = NULL;
2543 /* If the return value is used, don't do the transformation. */
2544 if (gimple_call_lhs (stmt) != NULL_TREE)
2545 return false;
2547 /* Check whether the format is a literal string constant. */
2548 fmt_str = c_getstr (fmt);
2549 if (fmt_str == NULL)
2550 return false;
2552 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2554 /* If we're using an unlocked function, assume the other
2555 unlocked functions exist explicitly. */
2556 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2557 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2559 else
2561 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2562 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2565 if (!init_target_chars ())
2566 return false;
2568 if (strcmp (fmt_str, target_percent_s) == 0
2569 || strchr (fmt_str, target_percent) == NULL)
2571 const char *str;
2573 if (strcmp (fmt_str, target_percent_s) == 0)
2575 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2576 return false;
2578 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2579 return false;
2581 str = c_getstr (arg);
2582 if (str == NULL)
2583 return false;
2585 else
2587 /* The format specifier doesn't contain any '%' characters. */
2588 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2589 && arg)
2590 return false;
2591 str = fmt_str;
2594 /* If the string was "", printf does nothing. */
2595 if (str[0] == '\0')
2597 replace_call_with_value (gsi, NULL_TREE);
2598 return true;
2601 /* If the string has length of 1, call putchar. */
2602 if (str[1] == '\0')
2604 /* Given printf("c"), (where c is any one character,)
2605 convert "c"[0] to an int and pass that to the replacement
2606 function. */
2607 newarg = build_int_cst (integer_type_node, str[0]);
2608 if (fn_putchar)
2610 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2611 replace_call_with_call_and_fold (gsi, repl);
2612 return true;
2615 else
2617 /* If the string was "string\n", call puts("string"). */
2618 size_t len = strlen (str);
2619 if ((unsigned char)str[len - 1] == target_newline
2620 && (size_t) (int) len == len
2621 && (int) len > 0)
2623 char *newstr;
2624 tree offset_node, string_cst;
2626 /* Create a NUL-terminated string that's one char shorter
2627 than the original, stripping off the trailing '\n'. */
2628 newarg = build_string_literal (len, str);
2629 string_cst = string_constant (newarg, &offset_node);
2630 gcc_checking_assert (string_cst
2631 && (TREE_STRING_LENGTH (string_cst)
2632 == (int) len)
2633 && integer_zerop (offset_node)
2634 && (unsigned char)
2635 TREE_STRING_POINTER (string_cst)[len - 1]
2636 == target_newline);
2637 /* build_string_literal creates a new STRING_CST,
2638 modify it in place to avoid double copying. */
2639 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2640 newstr[len - 1] = '\0';
2641 if (fn_puts)
2643 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2644 replace_call_with_call_and_fold (gsi, repl);
2645 return true;
2648 else
2649 /* We'd like to arrange to call fputs(string,stdout) here,
2650 but we need stdout and don't have a way to get it yet. */
2651 return false;
2655 /* The other optimizations can be done only on the non-va_list variants. */
2656 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2657 return false;
2659 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2660 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2662 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2663 return false;
2664 if (fn_puts)
2666 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2667 replace_call_with_call_and_fold (gsi, repl);
2668 return true;
2672 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2673 else if (strcmp (fmt_str, target_percent_c) == 0)
2675 if (!arg || ! useless_type_conversion_p (integer_type_node,
2676 TREE_TYPE (arg)))
2677 return false;
2678 if (fn_putchar)
2680 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2681 replace_call_with_call_and_fold (gsi, repl);
2682 return true;
2686 return false;
2691 /* Fold a call to __builtin_strlen with known length LEN. */
2693 static bool
2694 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2696 gimple *stmt = gsi_stmt (*gsi);
2697 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2698 if (!len)
2699 return false;
2700 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2701 replace_call_with_value (gsi, len);
2702 return true;
2705 /* Fold a call to __builtin_acc_on_device. */
2707 static bool
2708 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2710 /* Defer folding until we know which compiler we're in. */
2711 if (symtab->state != EXPANSION)
2712 return false;
2714 unsigned val_host = GOMP_DEVICE_HOST;
2715 unsigned val_dev = GOMP_DEVICE_NONE;
2717 #ifdef ACCEL_COMPILER
2718 val_host = GOMP_DEVICE_NOT_HOST;
2719 val_dev = ACCEL_COMPILER_acc_device;
2720 #endif
2722 location_t loc = gimple_location (gsi_stmt (*gsi));
2724 tree host_eq = make_ssa_name (boolean_type_node);
2725 gimple *host_ass = gimple_build_assign
2726 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2727 gimple_set_location (host_ass, loc);
2728 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2730 tree dev_eq = make_ssa_name (boolean_type_node);
2731 gimple *dev_ass = gimple_build_assign
2732 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2733 gimple_set_location (dev_ass, loc);
2734 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2736 tree result = make_ssa_name (boolean_type_node);
2737 gimple *result_ass = gimple_build_assign
2738 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2739 gimple_set_location (result_ass, loc);
2740 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2742 replace_call_with_value (gsi, result);
2744 return true;
2747 /* Fold the non-target builtin at *GSI and return whether any simplification
2748 was made. */
2750 static bool
2751 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2753 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2754 tree callee = gimple_call_fndecl (stmt);
2756 /* Give up for always_inline inline builtins until they are
2757 inlined. */
2758 if (avoid_folding_inline_builtin (callee))
2759 return false;
2761 unsigned n = gimple_call_num_args (stmt);
2762 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2763 switch (fcode)
2765 case BUILT_IN_BZERO:
2766 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2767 gimple_call_arg (stmt, 1));
2768 case BUILT_IN_MEMSET:
2769 return gimple_fold_builtin_memset (gsi,
2770 gimple_call_arg (stmt, 1),
2771 gimple_call_arg (stmt, 2));
2772 case BUILT_IN_BCOPY:
2773 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2774 gimple_call_arg (stmt, 0), 3);
2775 case BUILT_IN_MEMCPY:
2776 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2777 gimple_call_arg (stmt, 1), 0);
2778 case BUILT_IN_MEMPCPY:
2779 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2780 gimple_call_arg (stmt, 1), 1);
2781 case BUILT_IN_MEMMOVE:
2782 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2783 gimple_call_arg (stmt, 1), 3);
2784 case BUILT_IN_SPRINTF_CHK:
2785 case BUILT_IN_VSPRINTF_CHK:
2786 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2787 case BUILT_IN_STRCAT_CHK:
2788 return gimple_fold_builtin_strcat_chk (gsi);
2789 case BUILT_IN_STRNCAT_CHK:
2790 return gimple_fold_builtin_strncat_chk (gsi);
2791 case BUILT_IN_STRLEN:
2792 return gimple_fold_builtin_strlen (gsi);
2793 case BUILT_IN_STRCPY:
2794 return gimple_fold_builtin_strcpy (gsi,
2795 gimple_call_arg (stmt, 0),
2796 gimple_call_arg (stmt, 1));
2797 case BUILT_IN_STRNCPY:
2798 return gimple_fold_builtin_strncpy (gsi,
2799 gimple_call_arg (stmt, 0),
2800 gimple_call_arg (stmt, 1),
2801 gimple_call_arg (stmt, 2));
2802 case BUILT_IN_STRCAT:
2803 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2804 gimple_call_arg (stmt, 1));
2805 case BUILT_IN_STRNCAT:
2806 return gimple_fold_builtin_strncat (gsi);
2807 case BUILT_IN_FPUTS:
2808 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2809 gimple_call_arg (stmt, 1), false);
2810 case BUILT_IN_FPUTS_UNLOCKED:
2811 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2812 gimple_call_arg (stmt, 1), true);
2813 case BUILT_IN_MEMCPY_CHK:
2814 case BUILT_IN_MEMPCPY_CHK:
2815 case BUILT_IN_MEMMOVE_CHK:
2816 case BUILT_IN_MEMSET_CHK:
2817 return gimple_fold_builtin_memory_chk (gsi,
2818 gimple_call_arg (stmt, 0),
2819 gimple_call_arg (stmt, 1),
2820 gimple_call_arg (stmt, 2),
2821 gimple_call_arg (stmt, 3),
2822 fcode);
2823 case BUILT_IN_STPCPY:
2824 return gimple_fold_builtin_stpcpy (gsi);
2825 case BUILT_IN_STRCPY_CHK:
2826 case BUILT_IN_STPCPY_CHK:
2827 return gimple_fold_builtin_stxcpy_chk (gsi,
2828 gimple_call_arg (stmt, 0),
2829 gimple_call_arg (stmt, 1),
2830 gimple_call_arg (stmt, 2),
2831 fcode);
2832 case BUILT_IN_STRNCPY_CHK:
2833 case BUILT_IN_STPNCPY_CHK:
2834 return gimple_fold_builtin_stxncpy_chk (gsi,
2835 gimple_call_arg (stmt, 0),
2836 gimple_call_arg (stmt, 1),
2837 gimple_call_arg (stmt, 2),
2838 gimple_call_arg (stmt, 3),
2839 fcode);
2840 case BUILT_IN_SNPRINTF_CHK:
2841 case BUILT_IN_VSNPRINTF_CHK:
2842 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2843 case BUILT_IN_SNPRINTF:
2844 return gimple_fold_builtin_snprintf (gsi);
2845 case BUILT_IN_SPRINTF:
2846 return gimple_fold_builtin_sprintf (gsi);
2847 case BUILT_IN_FPRINTF:
2848 case BUILT_IN_FPRINTF_UNLOCKED:
2849 case BUILT_IN_VFPRINTF:
2850 if (n == 2 || n == 3)
2851 return gimple_fold_builtin_fprintf (gsi,
2852 gimple_call_arg (stmt, 0),
2853 gimple_call_arg (stmt, 1),
2854 n == 3
2855 ? gimple_call_arg (stmt, 2)
2856 : NULL_TREE,
2857 fcode);
2858 break;
2859 case BUILT_IN_FPRINTF_CHK:
2860 case BUILT_IN_VFPRINTF_CHK:
2861 if (n == 3 || n == 4)
2862 return gimple_fold_builtin_fprintf (gsi,
2863 gimple_call_arg (stmt, 0),
2864 gimple_call_arg (stmt, 2),
2865 n == 4
2866 ? gimple_call_arg (stmt, 3)
2867 : NULL_TREE,
2868 fcode);
2869 break;
2870 case BUILT_IN_PRINTF:
2871 case BUILT_IN_PRINTF_UNLOCKED:
2872 case BUILT_IN_VPRINTF:
2873 if (n == 1 || n == 2)
2874 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2875 n == 2
2876 ? gimple_call_arg (stmt, 1)
2877 : NULL_TREE, fcode);
2878 break;
2879 case BUILT_IN_PRINTF_CHK:
2880 case BUILT_IN_VPRINTF_CHK:
2881 if (n == 2 || n == 3)
2882 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2883 n == 3
2884 ? gimple_call_arg (stmt, 2)
2885 : NULL_TREE, fcode);
2886 break;
2887 case BUILT_IN_ACC_ON_DEVICE:
2888 return gimple_fold_builtin_acc_on_device (gsi,
2889 gimple_call_arg (stmt, 0));
2890 default:;
2893 /* Try the generic builtin folder. */
2894 bool ignore = (gimple_call_lhs (stmt) == NULL);
2895 tree result = fold_call_stmt (stmt, ignore);
2896 if (result)
2898 if (ignore)
2899 STRIP_NOPS (result);
2900 else
2901 result = fold_convert (gimple_call_return_type (stmt), result);
2902 if (!update_call_from_tree (gsi, result))
2903 gimplify_and_update_call_from_tree (gsi, result);
2904 return true;
2907 return false;
2910 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
2911 function calls to constants, where possible. */
2913 static tree
2914 fold_internal_goacc_dim (const gimple *call)
2916 int axis = get_oacc_ifn_dim_arg (call);
2917 int size = get_oacc_fn_dim_size (current_function_decl, axis);
2918 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
2919 tree result = NULL_TREE;
2921 /* If the size is 1, or we only want the size and it is not dynamic,
2922 we know the answer. */
2923 if (size == 1 || (!is_pos && size))
2925 tree type = TREE_TYPE (gimple_call_lhs (call));
2926 result = build_int_cst (type, size - is_pos);
2929 return result;
2932 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2933 doesn't fit into TYPE. The test for overflow should be regardless of
2934 -fwrapv, and even for unsigned types. */
2936 bool
2937 arith_overflowed_p (enum tree_code code, const_tree type,
2938 const_tree arg0, const_tree arg1)
2940 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2941 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2942 widest2_int_cst;
2943 widest2_int warg0 = widest2_int_cst (arg0);
2944 widest2_int warg1 = widest2_int_cst (arg1);
2945 widest2_int wres;
2946 switch (code)
2948 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2949 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2950 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2951 default: gcc_unreachable ();
2953 signop sign = TYPE_SIGN (type);
2954 if (sign == UNSIGNED && wi::neg_p (wres))
2955 return true;
2956 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2959 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2960 The statement may be replaced by another statement, e.g., if the call
2961 simplifies to a constant value. Return true if any changes were made.
2962 It is assumed that the operands have been previously folded. */
2964 static bool
2965 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2967 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2968 tree callee;
2969 bool changed = false;
2970 unsigned i;
2972 /* Fold *& in call arguments. */
2973 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2974 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2976 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2977 if (tmp)
2979 gimple_call_set_arg (stmt, i, tmp);
2980 changed = true;
2984 /* Check for virtual calls that became direct calls. */
2985 callee = gimple_call_fn (stmt);
2986 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
2988 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
2990 if (dump_file && virtual_method_call_p (callee)
2991 && !possible_polymorphic_call_target_p
2992 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
2993 (OBJ_TYPE_REF_EXPR (callee)))))
2995 fprintf (dump_file,
2996 "Type inheritance inconsistent devirtualization of ");
2997 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2998 fprintf (dump_file, " to ");
2999 print_generic_expr (dump_file, callee, TDF_SLIM);
3000 fprintf (dump_file, "\n");
3003 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3004 changed = true;
3006 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3008 bool final;
3009 vec <cgraph_node *>targets
3010 = possible_polymorphic_call_targets (callee, stmt, &final);
3011 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3013 tree lhs = gimple_call_lhs (stmt);
3014 if (dump_enabled_p ())
3016 location_t loc = gimple_location_safe (stmt);
3017 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3018 "folding virtual function call to %s\n",
3019 targets.length () == 1
3020 ? targets[0]->name ()
3021 : "__builtin_unreachable");
3023 if (targets.length () == 1)
3025 gimple_call_set_fndecl (stmt, targets[0]->decl);
3026 changed = true;
3027 /* If the call becomes noreturn, remove the lhs. */
3028 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3030 if (TREE_CODE (lhs) == SSA_NAME)
3032 tree var = create_tmp_var (TREE_TYPE (lhs));
3033 tree def = get_or_create_ssa_default_def (cfun, var);
3034 gimple *new_stmt = gimple_build_assign (lhs, def);
3035 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3037 gimple_call_set_lhs (stmt, NULL_TREE);
3039 maybe_remove_unused_call_args (cfun, stmt);
3041 else
3043 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3044 gimple *new_stmt = gimple_build_call (fndecl, 0);
3045 gimple_set_location (new_stmt, gimple_location (stmt));
3046 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3048 tree var = create_tmp_var (TREE_TYPE (lhs));
3049 tree def = get_or_create_ssa_default_def (cfun, var);
3051 /* To satisfy condition for
3052 cgraph_update_edges_for_call_stmt_node,
3053 we need to preserve GIMPLE_CALL statement
3054 at position of GSI iterator. */
3055 update_call_from_tree (gsi, def);
3056 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3058 else
3060 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3061 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3062 gsi_replace (gsi, new_stmt, false);
3064 return true;
3070 /* Check for indirect calls that became direct calls, and then
3071 no longer require a static chain. */
3072 if (gimple_call_chain (stmt))
3074 tree fn = gimple_call_fndecl (stmt);
3075 if (fn && !DECL_STATIC_CHAIN (fn))
3077 gimple_call_set_chain (stmt, NULL);
3078 changed = true;
3080 else
3082 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3083 if (tmp)
3085 gimple_call_set_chain (stmt, tmp);
3086 changed = true;
3091 if (inplace)
3092 return changed;
3094 /* Check for builtins that CCP can handle using information not
3095 available in the generic fold routines. */
3096 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3098 if (gimple_fold_builtin (gsi))
3099 changed = true;
3101 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3103 changed |= targetm.gimple_fold_builtin (gsi);
3105 else if (gimple_call_internal_p (stmt))
3107 enum tree_code subcode = ERROR_MARK;
3108 tree result = NULL_TREE;
3109 bool cplx_result = false;
3110 tree overflow = NULL_TREE;
3111 switch (gimple_call_internal_fn (stmt))
3113 case IFN_BUILTIN_EXPECT:
3114 result = fold_builtin_expect (gimple_location (stmt),
3115 gimple_call_arg (stmt, 0),
3116 gimple_call_arg (stmt, 1),
3117 gimple_call_arg (stmt, 2));
3118 break;
3119 case IFN_UBSAN_OBJECT_SIZE:
3120 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3121 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3122 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3123 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3124 gimple_call_arg (stmt, 2))))
3126 gsi_replace (gsi, gimple_build_nop (), false);
3127 unlink_stmt_vdef (stmt);
3128 release_defs (stmt);
3129 return true;
3131 break;
3132 case IFN_GOACC_DIM_SIZE:
3133 case IFN_GOACC_DIM_POS:
3134 result = fold_internal_goacc_dim (stmt);
3135 break;
3136 case IFN_UBSAN_CHECK_ADD:
3137 subcode = PLUS_EXPR;
3138 break;
3139 case IFN_UBSAN_CHECK_SUB:
3140 subcode = MINUS_EXPR;
3141 break;
3142 case IFN_UBSAN_CHECK_MUL:
3143 subcode = MULT_EXPR;
3144 break;
3145 case IFN_ADD_OVERFLOW:
3146 subcode = PLUS_EXPR;
3147 cplx_result = true;
3148 break;
3149 case IFN_SUB_OVERFLOW:
3150 subcode = MINUS_EXPR;
3151 cplx_result = true;
3152 break;
3153 case IFN_MUL_OVERFLOW:
3154 subcode = MULT_EXPR;
3155 cplx_result = true;
3156 break;
3157 default:
3158 break;
3160 if (subcode != ERROR_MARK)
3162 tree arg0 = gimple_call_arg (stmt, 0);
3163 tree arg1 = gimple_call_arg (stmt, 1);
3164 tree type = TREE_TYPE (arg0);
3165 if (cplx_result)
3167 tree lhs = gimple_call_lhs (stmt);
3168 if (lhs == NULL_TREE)
3169 type = NULL_TREE;
3170 else
3171 type = TREE_TYPE (TREE_TYPE (lhs));
3173 if (type == NULL_TREE)
3175 /* x = y + 0; x = y - 0; x = y * 0; */
3176 else if (integer_zerop (arg1))
3177 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3178 /* x = 0 + y; x = 0 * y; */
3179 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3180 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3181 /* x = y - y; */
3182 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3183 result = integer_zero_node;
3184 /* x = y * 1; x = 1 * y; */
3185 else if (subcode == MULT_EXPR && integer_onep (arg1))
3186 result = arg0;
3187 else if (subcode == MULT_EXPR && integer_onep (arg0))
3188 result = arg1;
3189 else if (TREE_CODE (arg0) == INTEGER_CST
3190 && TREE_CODE (arg1) == INTEGER_CST)
3192 if (cplx_result)
3193 result = int_const_binop (subcode, fold_convert (type, arg0),
3194 fold_convert (type, arg1));
3195 else
3196 result = int_const_binop (subcode, arg0, arg1);
3197 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3199 if (cplx_result)
3200 overflow = build_one_cst (type);
3201 else
3202 result = NULL_TREE;
3205 if (result)
3207 if (result == integer_zero_node)
3208 result = build_zero_cst (type);
3209 else if (cplx_result && TREE_TYPE (result) != type)
3211 if (TREE_CODE (result) == INTEGER_CST)
3213 if (arith_overflowed_p (PLUS_EXPR, type, result,
3214 integer_zero_node))
3215 overflow = build_one_cst (type);
3217 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3218 && TYPE_UNSIGNED (type))
3219 || (TYPE_PRECISION (type)
3220 < (TYPE_PRECISION (TREE_TYPE (result))
3221 + (TYPE_UNSIGNED (TREE_TYPE (result))
3222 && !TYPE_UNSIGNED (type)))))
3223 result = NULL_TREE;
3224 if (result)
3225 result = fold_convert (type, result);
3230 if (result)
3232 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3233 result = drop_tree_overflow (result);
3234 if (cplx_result)
3236 if (overflow == NULL_TREE)
3237 overflow = build_zero_cst (TREE_TYPE (result));
3238 tree ctype = build_complex_type (TREE_TYPE (result));
3239 if (TREE_CODE (result) == INTEGER_CST
3240 && TREE_CODE (overflow) == INTEGER_CST)
3241 result = build_complex (ctype, result, overflow);
3242 else
3243 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3244 ctype, result, overflow);
3246 if (!update_call_from_tree (gsi, result))
3247 gimplify_and_update_call_from_tree (gsi, result);
3248 changed = true;
3252 return changed;
3256 /* Return true whether NAME has a use on STMT. */
3258 static bool
3259 has_use_on_stmt (tree name, gimple *stmt)
3261 imm_use_iterator iter;
3262 use_operand_p use_p;
3263 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3264 if (USE_STMT (use_p) == stmt)
3265 return true;
3266 return false;
3269 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3270 gimple_simplify.
3272 Replaces *GSI with the simplification result in RCODE and OPS
3273 and the associated statements in *SEQ. Does the replacement
3274 according to INPLACE and returns true if the operation succeeded. */
3276 static bool
3277 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3278 code_helper rcode, tree *ops,
3279 gimple_seq *seq, bool inplace)
3281 gimple *stmt = gsi_stmt (*gsi);
3283 /* Play safe and do not allow abnormals to be mentioned in
3284 newly created statements. See also maybe_push_res_to_seq.
3285 As an exception allow such uses if there was a use of the
3286 same SSA name on the old stmt. */
3287 if ((TREE_CODE (ops[0]) == SSA_NAME
3288 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3289 && !has_use_on_stmt (ops[0], stmt))
3290 || (ops[1]
3291 && TREE_CODE (ops[1]) == SSA_NAME
3292 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3293 && !has_use_on_stmt (ops[1], stmt))
3294 || (ops[2]
3295 && TREE_CODE (ops[2]) == SSA_NAME
3296 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3297 && !has_use_on_stmt (ops[2], stmt)))
3298 return false;
3300 /* Don't insert new statements when INPLACE is true, even if we could
3301 reuse STMT for the final statement. */
3302 if (inplace && !gimple_seq_empty_p (*seq))
3303 return false;
/* A GIMPLE_COND result must be re-expressed as a condition: a
   comparison directly, an SSA name tested NE against zero, or a
   constant turned into an always-true/always-false condition.  */
3305 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3307 gcc_assert (rcode.is_tree_code ());
3308 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3309 /* GIMPLE_CONDs condition may not throw. */
3310 && (!flag_exceptions
3311 || !cfun->can_throw_non_call_exceptions
3312 || !operation_could_trap_p (rcode,
3313 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3314 false, NULL_TREE)))
3315 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3316 else if (rcode == SSA_NAME)
3317 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3318 build_zero_cst (TREE_TYPE (ops[0])));
3319 else if (rcode == INTEGER_CST)
3321 if (integer_zerop (ops[0]))
3322 gimple_cond_make_false (cond_stmt);
3323 else
3324 gimple_cond_make_true (cond_stmt);
3326 else if (!inplace)
3328 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3329 ops, seq);
3330 if (!res)
3331 return false;
3332 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3333 build_zero_cst (TREE_TYPE (res)));
3335 else
3336 return false;
3337 if (dump_file && (dump_flags & TDF_DETAILS))
3339 fprintf (dump_file, "gimple_simplified to ");
3340 if (!gimple_seq_empty_p (*seq))
3341 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3342 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3343 0, TDF_SLIM);
3345 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3346 return true;
/* An assignment can take the simplified rhs in place when it does
   not need more operand slots than the original statement has.  */
3348 else if (is_gimple_assign (stmt)
3349 && rcode.is_tree_code ())
3351 if (!inplace
3352 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3354 maybe_build_generic_op (rcode,
3355 TREE_TYPE (gimple_assign_lhs (stmt)),
3356 &ops[0], ops[1], ops[2]);
3357 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3358 if (dump_file && (dump_flags & TDF_DETAILS))
3360 fprintf (dump_file, "gimple_simplified to ");
3361 if (!gimple_seq_empty_p (*seq))
3362 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3363 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3364 0, TDF_SLIM);
3366 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3367 return true;
/* A call simplified to a call of the same builtin only needs its
   arguments replaced in place.  */
3370 else if (rcode.is_fn_code ()
3371 && gimple_call_builtin_p (stmt, rcode))
3373 unsigned i;
3374 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3376 gcc_assert (ops[i] != NULL_TREE);
3377 gimple_call_set_arg (stmt, i, ops[i]);
3379 if (i < 3)
3380 gcc_assert (ops[i] == NULL_TREE);
3381 if (dump_file && (dump_flags & TDF_DETAILS))
3383 fprintf (dump_file, "gimple_simplified to ");
3384 if (!gimple_seq_empty_p (*seq))
3385 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3386 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
3388 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3389 return true;
/* Otherwise materialize the result into *SEQ and replace the whole
   statement, preserving virtual operands.  */
3391 else if (!inplace)
3393 if (gimple_has_lhs (stmt))
3395 tree lhs = gimple_get_lhs (stmt);
3396 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3397 ops, seq, lhs))
3398 return false;
3399 if (dump_file && (dump_flags & TDF_DETAILS))
3401 fprintf (dump_file, "gimple_simplified to ");
3402 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3404 gsi_replace_with_seq_vops (gsi, *seq);
3405 return true;
3407 else
3408 gcc_unreachable ();
3411 return false;
3414 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3416 static bool
3417 maybe_canonicalize_mem_ref_addr (tree *t)
3419 bool res = false;
/* Strip an outer ADDR_EXPR and any handled components so *T points
   at the base memory reference to be canonicalized.  */
3421 if (TREE_CODE (*t) == ADDR_EXPR)
3422 t = &TREE_OPERAND (*t, 0);
3424 while (handled_component_p (*t))
3425 t = &TREE_OPERAND (*t, 0);
3427 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3428 of invariant addresses into a SSA name MEM_REF address. */
3429 if (TREE_CODE (*t) == MEM_REF
3430 || TREE_CODE (*t) == TARGET_MEM_REF)
3432 tree addr = TREE_OPERAND (*t, 0);
3433 if (TREE_CODE (addr) == ADDR_EXPR
3434 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3435 || handled_component_p (TREE_OPERAND (addr, 0))))
3437 tree base;
3438 HOST_WIDE_INT coffset;
3439 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3440 &coffset);
3441 if (!base)
3442 gcc_unreachable ();
/* Fold the component offset into the MEM_REF's constant offset
   operand and take the address of the underlying base.  */
3444 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3445 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3446 TREE_OPERAND (*t, 1),
3447 size_int (coffset));
3448 res = true;
3450 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3451 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3454 /* Canonicalize back MEM_REFs to plain reference trees if the object
3455 accessed is a decl that has the same access semantics as the MEM_REF. */
3456 if (TREE_CODE (*t) == MEM_REF
3457 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3458 && integer_zerop (TREE_OPERAND (*t, 1))
3459 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3461 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3462 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3463 if (/* Same volatile qualification. */
3464 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3465 /* Same TBAA behavior with -fstrict-aliasing. */
3466 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3467 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3468 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3469 /* Same alignment. */
3470 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3471 /* We have to look out here to not drop a required conversion
3472 from the rhs to the lhs if *t appears on the lhs or vice-versa
3473 if it appears on the rhs. Thus require strict type
3474 compatibility. */
3475 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3477 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3478 res = true;
3482 /* Canonicalize TARGET_MEM_REF in particular with respect to
3483 the indexes becoming constant. */
3484 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3486 tree tem = maybe_fold_tmr (*t);
3487 if (tem)
3489 *t = tem;
3490 res = true;
3494 return res;
3497 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3498 distinguishes both cases. */
3500 static bool
3501 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3503 bool changed = false;
3504 gimple *stmt = gsi_stmt (*gsi);
3505 unsigned i;
3507 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3508 after propagation.
3509 ??? This shouldn't be done in generic folding but in the
3510 propagation helpers which also know whether an address was
3511 propagated.
3512 Also canonicalize operand order. */
3513 switch (gimple_code (stmt))
3515 case GIMPLE_ASSIGN:
3516 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3518 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3519 if ((REFERENCE_CLASS_P (*rhs)
3520 || TREE_CODE (*rhs) == ADDR_EXPR)
3521 && maybe_canonicalize_mem_ref_addr (rhs))
3522 changed = true;
3523 tree *lhs = gimple_assign_lhs_ptr (stmt);
3524 if (REFERENCE_CLASS_P (*lhs)
3525 && maybe_canonicalize_mem_ref_addr (lhs))
3526 changed = true;
3528 else
3530 /* Canonicalize operand order. */
3531 enum tree_code code = gimple_assign_rhs_code (stmt);
3532 if (TREE_CODE_CLASS (code) == tcc_comparison
3533 || commutative_tree_code (code)
3534 || commutative_ternary_tree_code (code))
3536 tree rhs1 = gimple_assign_rhs1 (stmt);
3537 tree rhs2 = gimple_assign_rhs2 (stmt);
3538 if (tree_swap_operands_p (rhs1, rhs2, false))
3540 gimple_assign_set_rhs1 (stmt, rhs2);
3541 gimple_assign_set_rhs2 (stmt, rhs1);
3542 if (TREE_CODE_CLASS (code) == tcc_comparison)
3543 gimple_assign_set_rhs_code (stmt,
3544 swap_tree_comparison (code));
3545 changed = true;
3549 break;
3550 case GIMPLE_CALL:
3552 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3554 tree *arg = gimple_call_arg_ptr (stmt, i);
3555 if (REFERENCE_CLASS_P (*arg)
3556 && maybe_canonicalize_mem_ref_addr (arg))
3557 changed = true;
3559 tree *lhs = gimple_call_lhs_ptr (stmt);
3560 if (*lhs
3561 && REFERENCE_CLASS_P (*lhs)
3562 && maybe_canonicalize_mem_ref_addr (lhs))
3563 changed = true;
3564 break;
3566 case GIMPLE_ASM:
3568 gasm *asm_stmt = as_a <gasm *> (stmt);
3569 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3571 tree link = gimple_asm_output_op (asm_stmt, i);
3572 tree op = TREE_VALUE (link);
3573 if (REFERENCE_CLASS_P (op)
3574 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3575 changed = true;
3577 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3579 tree link = gimple_asm_input_op (asm_stmt, i);
3580 tree op = TREE_VALUE (link);
3581 if ((REFERENCE_CLASS_P (op)
3582 || TREE_CODE (op) == ADDR_EXPR)
3583 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3584 changed = true;
3587 break;
3588 case GIMPLE_DEBUG:
3589 if (gimple_debug_bind_p (stmt))
3591 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3592 if (*val
3593 && (REFERENCE_CLASS_P (*val)
3594 || TREE_CODE (*val) == ADDR_EXPR)
3595 && maybe_canonicalize_mem_ref_addr (val))
3596 changed = true;
3598 break;
3599 case GIMPLE_COND:
3601 /* Canonicalize operand order. */
3602 tree lhs = gimple_cond_lhs (stmt);
3603 tree rhs = gimple_cond_rhs (stmt);
3604 if (tree_swap_operands_p (lhs, rhs, false))
3606 gcond *gc = as_a <gcond *> (stmt);
3607 gimple_cond_set_lhs (gc, rhs);
3608 gimple_cond_set_rhs (gc, lhs);
3609 gimple_cond_set_code (gc,
3610 swap_tree_comparison (gimple_cond_code (gc)));
3611 changed = true;
3614 default:;
3617 /* Dispatch to pattern-based folding. */
3618 if (!inplace
3619 || is_gimple_assign (stmt)
3620 || gimple_code (stmt) == GIMPLE_COND)
3622 gimple_seq seq = NULL;
3623 code_helper rcode;
3624 tree ops[3] = {};
3625 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3626 valueize, valueize))
3628 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3629 changed = true;
3630 else
3631 gimple_seq_discard (seq);
/* Re-fetch the statement; the pattern folding above may have
   replaced it.  */
3635 stmt = gsi_stmt (*gsi);
3637 /* Fold the main computation performed by the statement. */
3638 switch (gimple_code (stmt))
3640 case GIMPLE_ASSIGN:
3642 /* Try to canonicalize for boolean-typed X the comparisons
3643 X == 0, X == 1, X != 0, and X != 1. */
3644 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3645 || gimple_assign_rhs_code (stmt) == NE_EXPR)
3647 tree lhs = gimple_assign_lhs (stmt);
3648 tree op1 = gimple_assign_rhs1 (stmt);
3649 tree op2 = gimple_assign_rhs2 (stmt);
3650 tree type = TREE_TYPE (op1);
3652 /* Check whether the comparison operands are of the same boolean
3653 type as the result type is.
3654 Check that second operand is an integer-constant with value
3655 one or zero. */
3656 if (TREE_CODE (op2) == INTEGER_CST
3657 && (integer_zerop (op2) || integer_onep (op2))
3658 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3660 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3661 bool is_logical_not = false;
3663 /* X == 0 and X != 1 is a logical-not of X
3664 X == 1 and X != 0 is X */
3665 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
3666 || (cmp_code == NE_EXPR && integer_onep (op2)))
3667 is_logical_not = true;
3669 if (is_logical_not == false)
3670 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
3671 /* Only for one-bit precision typed X the transformation
3672 !X -> ~X is valid. */
3673 else if (TYPE_PRECISION (type) == 1)
3674 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
3675 /* Otherwise we use !X -> X ^ 1. */
3676 else
3677 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
3678 build_int_cst (type, 1));
3679 changed = true;
3680 break;
/* Otherwise fold the rhs generically; convert the folded rhs to the
   lhs type when needed, and only install it inplace if it does not
   need more operand slots than the original.  */
3684 unsigned old_num_ops = gimple_num_ops (stmt);
3685 tree lhs = gimple_assign_lhs (stmt);
3686 tree new_rhs = fold_gimple_assign (gsi);
3687 if (new_rhs
3688 && !useless_type_conversion_p (TREE_TYPE (lhs),
3689 TREE_TYPE (new_rhs)))
3690 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3691 if (new_rhs
3692 && (!inplace
3693 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3695 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3696 changed = true;
3698 break;
3701 case GIMPLE_CALL:
3702 changed |= gimple_fold_call (gsi, inplace);
3703 break;
3705 case GIMPLE_ASM:
3706 /* Fold *& in asm operands. */
3708 gasm *asm_stmt = as_a <gasm *> (stmt);
3709 size_t noutputs;
3710 const char **oconstraints;
3711 const char *constraint;
3712 bool allows_mem, allows_reg;
3714 noutputs = gimple_asm_noutputs (asm_stmt);
3715 oconstraints = XALLOCAVEC (const char *, noutputs);
3717 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3719 tree link = gimple_asm_output_op (asm_stmt, i);
3720 tree op = TREE_VALUE (link);
3721 oconstraints[i]
3722 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3723 if (REFERENCE_CLASS_P (op)
3724 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3726 TREE_VALUE (link) = op;
3727 changed = true;
3730 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3732 tree link = gimple_asm_input_op (asm_stmt, i);
3733 tree op = TREE_VALUE (link);
3734 constraint
3735 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3736 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3737 oconstraints, &allows_mem, &allows_reg);
3738 if (REFERENCE_CLASS_P (op)
3739 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3740 != NULL_TREE)
3742 TREE_VALUE (link) = op;
3743 changed = true;
3747 break;
3749 case GIMPLE_DEBUG:
3750 if (gimple_debug_bind_p (stmt))
3752 tree val = gimple_debug_bind_get_value (stmt);
3753 if (val
3754 && REFERENCE_CLASS_P (val))
3756 tree tem = maybe_fold_reference (val, false);
3757 if (tem)
3759 gimple_debug_bind_set_value (stmt, tem);
3760 changed = true;
3763 else if (val
3764 && TREE_CODE (val) == ADDR_EXPR)
3766 tree ref = TREE_OPERAND (val, 0);
3767 tree tem = maybe_fold_reference (ref, false);
3768 if (tem)
3770 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3771 gimple_debug_bind_set_value (stmt, tem);
3772 changed = true;
3776 break;
3778 default:;
/* The statement may have been replaced above; re-fetch it.  */
3781 stmt = gsi_stmt (*gsi);
3783 /* Fold *& on the lhs. */
3784 if (gimple_has_lhs (stmt))
3786 tree lhs = gimple_get_lhs (stmt);
3787 if (lhs && REFERENCE_CLASS_P (lhs))
3789 tree new_lhs = maybe_fold_reference (lhs, true);
3790 if (new_lhs)
3792 gimple_set_lhs (stmt, new_lhs);
3793 changed = true;
3798 return changed;
3801 /* Valueization callback that ends up not following SSA edges.  */
3803 tree
3804 no_follow_ssa_edges (tree)
3806 return NULL_TREE;
3809 /* Valueization callback that ends up following single-use SSA edges only. */
3811 tree
3812 follow_single_use_edges (tree val)
3814 if (TREE_CODE (val) == SSA_NAME
3815 && !has_single_use (val))
3816 return NULL_TREE;
3817 return val;
3820 /* Fold the statement pointed to by GSI. In some cases, this function may
3821 replace the whole statement with a new one. Returns true iff folding
3822 makes any changes.
3823 The statement pointed to by GSI should be in valid gimple form but may
3824 be in unfolded state as resulting from for example constant propagation
3825 which can produce *&x = 0. */
3827 bool
3828 fold_stmt (gimple_stmt_iterator *gsi)
/* Uses no_follow_ssa_edges, i.e. does not look through SSA definitions
   while simplifying.  */
3830 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
/* Fold the statement pointed to by GSI, valueizing SSA operands with
   the callback VALUEIZE.  May replace the whole statement; returns
   true iff folding made any changes.  */
3833 bool
3834 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3836 return fold_stmt_1 (gsi, false, valueize);
3839 /* Perform the minimal folding on statement *GSI. Only operations like
3840 *&x created by constant propagation are handled. The statement cannot
3841 be replaced with a new one. Return true if the statement was
3842 changed, false otherwise.
3843 The statement *GSI should be in valid gimple form but may
3844 be in unfolded state as resulting from for example constant propagation
3845 which can produce *&x = 0. */
3847 bool
3848 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3850 gimple *stmt = gsi_stmt (*gsi);
3851 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
/* Inplace folding must never have replaced the statement object.  */
3852 gcc_assert (gsi_stmt (*gsi) == stmt);
3853 return changed;
3856 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3857 if EXPR is null or we don't know how.
3858 If non-null, the result always has boolean type. */
3860 static tree
3861 canonicalize_bool (tree expr, bool invert)
3863 if (!expr)
3864 return NULL_TREE;
3865 else if (invert)
3867 if (integer_nonzerop (expr))
3868 return boolean_false_node;
3869 else if (integer_zerop (expr))
3870 return boolean_true_node;
3871 else if (TREE_CODE (expr) == SSA_NAME)
3872 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3873 build_int_cst (TREE_TYPE (expr), 0));
3874 else if (COMPARISON_CLASS_P (expr))
3875 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3876 boolean_type_node,
3877 TREE_OPERAND (expr, 0),
3878 TREE_OPERAND (expr, 1));
3879 else
3880 return NULL_TREE;
3882 else
3884 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3885 return expr;
3886 if (integer_nonzerop (expr))
3887 return boolean_true_node;
3888 else if (integer_zerop (expr))
3889 return boolean_false_node;
3890 else if (TREE_CODE (expr) == SSA_NAME)
3891 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3892 build_int_cst (TREE_TYPE (expr), 0));
3893 else if (COMPARISON_CLASS_P (expr))
3894 return fold_build2 (TREE_CODE (expr),
3895 boolean_type_node,
3896 TREE_OPERAND (expr, 0),
3897 TREE_OPERAND (expr, 1));
3898 else
3899 return NULL_TREE;
3903 /* Check to see if a boolean expression EXPR is logically equivalent to the
3904 comparison (OP1 CODE OP2). Check for various identities involving
3905 SSA_NAMEs. */
3907 static bool
3908 same_bool_comparison_p (const_tree expr, enum tree_code code,
3909 const_tree op1, const_tree op2)
3911 gimple *s;
3913 /* The obvious case. */
3914 if (TREE_CODE (expr) == code
3915 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3916 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3917 return true;
3919 /* Check for comparing (name, name != 0) and the case where expr
3920 is an SSA_NAME with a definition matching the comparison. */
3921 if (TREE_CODE (expr) == SSA_NAME
3922 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3924 if (operand_equal_p (expr, op1, 0))
3925 return ((code == NE_EXPR && integer_zerop (op2))
3926 || (code == EQ_EXPR && integer_nonzerop (op2)));
3927 s = SSA_NAME_DEF_STMT (expr);
3928 if (is_gimple_assign (s)
3929 && gimple_assign_rhs_code (s) == code
3930 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3931 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3932 return true;
3935 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3936 of name is a comparison, recurse. */
3937 if (TREE_CODE (op1) == SSA_NAME
3938 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3940 s = SSA_NAME_DEF_STMT (op1);
3941 if (is_gimple_assign (s)
3942 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
/* (name != 0) or (name == nonzero) keeps the defining comparison's
   sense; (name == 0) or (name != nonzero) inverts it.  */
3944 enum tree_code c = gimple_assign_rhs_code (s);
3945 if ((c == NE_EXPR && integer_zerop (op2))
3946 || (c == EQ_EXPR && integer_nonzerop (op2)))
3947 return same_bool_comparison_p (expr, c,
3948 gimple_assign_rhs1 (s),
3949 gimple_assign_rhs2 (s));
3950 if ((c == EQ_EXPR && integer_zerop (op2))
3951 || (c == NE_EXPR && integer_nonzerop (op2)))
3952 return same_bool_comparison_p (expr,
3953 invert_tree_comparison (c, false),
3954 gimple_assign_rhs1 (s),
3955 gimple_assign_rhs2 (s));
3958 return false;
3961 /* Check to see if two boolean expressions OP1 and OP2 are logically
3962 equivalent. */
3964 static bool
3965 same_bool_result_p (const_tree op1, const_tree op2)
3967 /* Simple cases first. */
3968 if (operand_equal_p (op1, op2, 0))
3969 return true;
3971 /* Check the cases where at least one of the operands is a comparison.
3972 These are a bit smarter than operand_equal_p in that they apply some
3973 identifies on SSA_NAMEs. */
3974 if (COMPARISON_CLASS_P (op2)
3975 && same_bool_comparison_p (op1, TREE_CODE (op2),
3976 TREE_OPERAND (op2, 0),
3977 TREE_OPERAND (op2, 1)))
3978 return true;
3979 if (COMPARISON_CLASS_P (op1)
3980 && same_bool_comparison_p (op2, TREE_CODE (op1),
3981 TREE_OPERAND (op1, 0),
3982 TREE_OPERAND (op1, 1)))
3983 return true;
3985 /* Default case. */
3986 return false;
3989 /* Forward declarations for some mutually recursive functions.
   The AND and OR simplifiers below call into each other when walking
   the SSA definitions of boolean operands (e.g. via DeMorgan
   rewrites), hence the declarations up front. */
3991 static tree
3992 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3993 enum tree_code code2, tree op2a, tree op2b);
3994 static tree
3995 and_var_with_comparison (tree var, bool invert,
3996 enum tree_code code2, tree op2a, tree op2b);
3997 static tree
3998 and_var_with_comparison_1 (gimple *stmt,
3999 enum tree_code code2, tree op2a, tree op2b);
4000 static tree
4001 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4002 enum tree_code code2, tree op2a, tree op2b);
4003 static tree
4004 or_var_with_comparison (tree var, bool invert,
4005 enum tree_code code2, tree op2a, tree op2b);
4006 static tree
4007 or_var_with_comparison_1 (gimple *stmt,
4008 enum tree_code code2, tree op2a, tree op2b);
4010 /* Helper function for and_comparisons_1: try to simplify the AND of the
4011 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4012 If INVERT is true, invert the value of the VAR before doing the AND.
4013 Return NULL_EXPR if we can't simplify this to a single expression. */
4015 static tree
4016 and_var_with_comparison (tree var, bool invert,
4017 enum tree_code code2, tree op2a, tree op2b)
4019 tree t;
4020 gimple *stmt = SSA_NAME_DEF_STMT (var);
4022 /* We can only deal with variables whose definitions are assignments. */
4023 if (!is_gimple_assign (stmt))
4024 return NULL_TREE;
4026 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4027 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4028 Then we only have to consider the simpler non-inverted cases. */
4029 if (invert)
4030 t = or_var_with_comparison_1 (stmt,
4031 invert_tree_comparison (code2, false),
4032 op2a, op2b);
4033 else
4034 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4035 return canonicalize_bool (t, invert);
4038 /* Try to simplify the AND of the ssa variable defined by the assignment
4039 STMT with the comparison specified by (OP2A CODE2 OP2B).
4040 Return NULL_TREE if we can't simplify this to a single expression. */
4042 static tree
4043 and_var_with_comparison_1 (gimple *stmt,
4044 enum tree_code code2, tree op2a, tree op2b)
4046 tree var = gimple_assign_lhs (stmt);
4047 tree true_test_var = NULL_TREE;
4048 tree false_test_var = NULL_TREE;
4049 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4051 /* Check for identities like (var AND (var == 0)) => false. */
4052 if (TREE_CODE (op2a) == SSA_NAME
4053 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4055 if ((code2 == NE_EXPR && integer_zerop (op2b))
4056 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4058 true_test_var = op2a;
4059 if (var == true_test_var)
4060 return var;
4062 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4063 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4065 false_test_var = op2a;
4066 if (var == false_test_var)
4067 return boolean_false_node;
4071 /* If the definition is a comparison, recurse on it. */
4072 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4074 tree t = and_comparisons_1 (innercode,
4075 gimple_assign_rhs1 (stmt),
4076 gimple_assign_rhs2 (stmt),
4077 code2,
4078 op2a,
4079 op2b);
4080 if (t)
4081 return t;
4084 /* If the definition is an AND or OR expression, we may be able to
4085 simplify by reassociating. */
4086 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4087 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4089 tree inner1 = gimple_assign_rhs1 (stmt);
4090 tree inner2 = gimple_assign_rhs2 (stmt);
4091 gimple *s;
4092 tree t;
4093 tree partial = NULL_TREE;
4094 bool is_and = (innercode == BIT_AND_EXPR);
4096 /* Check for boolean identities that don't require recursive examination
4097 of inner1/inner2:
4098 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4099 inner1 AND (inner1 OR inner2) => inner1
4100 !inner1 AND (inner1 AND inner2) => false
4101 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4102 Likewise for similar cases involving inner2. */
4103 if (inner1 == true_test_var)
4104 return (is_and ? var : inner1);
4105 else if (inner2 == true_test_var)
4106 return (is_and ? var : inner2);
4107 else if (inner1 == false_test_var)
4108 return (is_and
4109 ? boolean_false_node
4110 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4111 else if (inner2 == false_test_var)
4112 return (is_and
4113 ? boolean_false_node
4114 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4116 /* Next, redistribute/reassociate the AND across the inner tests.
4117 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4118 if (TREE_CODE (inner1) == SSA_NAME
4119 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4120 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4121 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4122 gimple_assign_rhs1 (s),
4123 gimple_assign_rhs2 (s),
4124 code2, op2a, op2b)))
4126 /* Handle the AND case, where we are reassociating:
4127 (inner1 AND inner2) AND (op2a code2 op2b)
4128 => (t AND inner2)
4129 If the partial result t is a constant, we win. Otherwise
4130 continue on to try reassociating with the other inner test. */
4131 if (is_and)
4133 if (integer_onep (t))
4134 return inner2;
4135 else if (integer_zerop (t))
4136 return boolean_false_node;
4139 /* Handle the OR case, where we are redistributing:
4140 (inner1 OR inner2) AND (op2a code2 op2b)
4141 => (t OR (inner2 AND (op2a code2 op2b))) */
4142 else if (integer_onep (t))
4143 return boolean_true_node;
4145 /* Save partial result for later. */
4146 partial = t;
4149 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4150 if (TREE_CODE (inner2) == SSA_NAME
4151 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4152 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4153 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4154 gimple_assign_rhs1 (s),
4155 gimple_assign_rhs2 (s),
4156 code2, op2a, op2b)))
4158 /* Handle the AND case, where we are reassociating:
4159 (inner1 AND inner2) AND (op2a code2 op2b)
4160 => (inner1 AND t) */
4161 if (is_and)
4163 if (integer_onep (t))
4164 return inner1;
4165 else if (integer_zerop (t))
4166 return boolean_false_node;
4167 /* If both are the same, we can apply the identity
4168 (x AND x) == x. */
4169 else if (partial && same_bool_result_p (t, partial))
4170 return t;
4173 /* Handle the OR case, where we are redistributing:
4174 (inner1 OR inner2) AND (op2a code2 op2b)
4175 => (t OR (inner1 AND (op2a code2 op2b)))
4176 => (t OR partial) */
4177 else
4179 if (integer_onep (t))
4180 return boolean_true_node;
4181 else if (partial)
4183 /* We already got a simplification for the other
4184 operand to the redistributed OR expression. The
4185 interesting case is when at least one is false.
4186 Or, if both are the same, we can apply the identity
4187 (x OR x) == x. */
4188 if (integer_zerop (partial))
4189 return t;
4190 else if (integer_zerop (t))
4191 return partial;
4192 else if (same_bool_result_p (t, partial))
4193 return t;
4198 return NULL_TREE;
4201 /* Try to simplify the AND of two comparisons defined by
4202 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4203 If this can be done without constructing an intermediate value,
4204 return the resulting tree; otherwise NULL_TREE is returned.
4205 This function is deliberately asymmetric as it recurses on SSA_DEFs
4206 in the first comparison but not the second. */
4208 static tree
4209 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4210 enum tree_code code2, tree op2a, tree op2b)
4212 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4214 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4215 if (operand_equal_p (op1a, op2a, 0)
4216 && operand_equal_p (op1b, op2b, 0))
4218 /* Result will be either NULL_TREE, or a combined comparison. */
4219 tree t = combine_comparisons (UNKNOWN_LOCATION,
4220 TRUTH_ANDIF_EXPR, code1, code2,
4221 truth_type, op1a, op1b);
4222 if (t)
4223 return t;
4226 /* Likewise the swapped case of the above. */
4227 if (operand_equal_p (op1a, op2b, 0)
4228 && operand_equal_p (op1b, op2a, 0))
4230 /* Result will be either NULL_TREE, or a combined comparison. */
4231 tree t = combine_comparisons (UNKNOWN_LOCATION,
4232 TRUTH_ANDIF_EXPR, code1,
4233 swap_tree_comparison (code2),
4234 truth_type, op1a, op1b);
4235 if (t)
4236 return t;
4239 /* If both comparisons are of the same value against constants, we might
4240 be able to merge them. */
4241 if (operand_equal_p (op1a, op2a, 0)
4242 && TREE_CODE (op1b) == INTEGER_CST
4243 && TREE_CODE (op2b) == INTEGER_CST)
4245 int cmp = tree_int_cst_compare (op1b, op2b);
4247 /* If we have (op1a == op1b), we should either be able to
4248 return that or FALSE, depending on whether the constant op1b
4249 also satisfies the other comparison against op2b. */
4250 if (code1 == EQ_EXPR)
4252 bool done = true;
4253 bool val;
4254 switch (code2)
4256 case EQ_EXPR: val = (cmp == 0); break;
4257 case NE_EXPR: val = (cmp != 0); break;
4258 case LT_EXPR: val = (cmp < 0); break;
4259 case GT_EXPR: val = (cmp > 0); break;
4260 case LE_EXPR: val = (cmp <= 0); break;
4261 case GE_EXPR: val = (cmp >= 0); break;
4262 default: done = false;
4264 if (done)
4266 if (val)
4267 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4268 else
4269 return boolean_false_node;
4272 /* Likewise if the second comparison is an == comparison. */
4273 else if (code2 == EQ_EXPR)
4275 bool done = true;
4276 bool val;
4277 switch (code1)
4279 case EQ_EXPR: val = (cmp == 0); break;
4280 case NE_EXPR: val = (cmp != 0); break;
4281 case LT_EXPR: val = (cmp > 0); break;
4282 case GT_EXPR: val = (cmp < 0); break;
4283 case LE_EXPR: val = (cmp >= 0); break;
4284 case GE_EXPR: val = (cmp <= 0); break;
4285 default: done = false;
4287 if (done)
4289 if (val)
4290 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4291 else
4292 return boolean_false_node;
4296 /* Same business with inequality tests. */
4297 else if (code1 == NE_EXPR)
4299 bool val;
4300 switch (code2)
4302 case EQ_EXPR: val = (cmp != 0); break;
4303 case NE_EXPR: val = (cmp == 0); break;
4304 case LT_EXPR: val = (cmp >= 0); break;
4305 case GT_EXPR: val = (cmp <= 0); break;
4306 case LE_EXPR: val = (cmp > 0); break;
4307 case GE_EXPR: val = (cmp < 0); break;
4308 default:
4309 val = false;
4311 if (val)
4312 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4314 else if (code2 == NE_EXPR)
4316 bool val;
4317 switch (code1)
4319 case EQ_EXPR: val = (cmp == 0); break;
4320 case NE_EXPR: val = (cmp != 0); break;
4321 case LT_EXPR: val = (cmp <= 0); break;
4322 case GT_EXPR: val = (cmp >= 0); break;
4323 case LE_EXPR: val = (cmp < 0); break;
4324 case GE_EXPR: val = (cmp > 0); break;
4325 default:
4326 val = false;
4328 if (val)
4329 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4332 /* Choose the more restrictive of two < or <= comparisons. */
4333 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4334 && (code2 == LT_EXPR || code2 == LE_EXPR))
4336 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4337 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4338 else
4339 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4342 /* Likewise choose the more restrictive of two > or >= comparisons. */
4343 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4344 && (code2 == GT_EXPR || code2 == GE_EXPR))
4346 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4347 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4348 else
4349 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4352 /* Check for singleton ranges. */
4353 else if (cmp == 0
4354 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4355 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4356 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b)
4358 /* Check for disjoint ranges. */
4359 else if (cmp <= 0
4360 && (code1 == LT_EXPR || code1 == LE_EXPR)
4361 && (code2 == GT_EXPR || code2 == GE_EXPR))
4362 return boolean_false_node;
4363 else if (cmp >= 0
4364 && (code1 == GT_EXPR || code1 == GE_EXPR)
4365 && (code2 == LT_EXPR || code2 == LE_EXPR))
4366 return boolean_false_node;
4369 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4370 NAME's definition is a truth value. See if there are any simplifications
4371 that can be done against the NAME's definition. */
4372 if (TREE_CODE (op1a) == SSA_NAME
4373 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4374 && (integer_zerop (op1b) || integer_onep (op1b)))
      /* INVERT is true when (op1a code1 op1b) is equivalent to !op1a,
	 i.e. the first comparison tests that OP1A is false.  */
4376 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4377 || (code1 == NE_EXPR && integer_onep (op1b)));
4378 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4379 switch (gimple_code (stmt))
4381 case GIMPLE_ASSIGN:
4382 /* Try to simplify by copy-propagating the definition. */
4383 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4385 case GIMPLE_PHI:
4386 /* If every argument to the PHI produces the same result when
4387 ANDed with the second comparison, we win.
4388 Do not do this unless the type is bool since we need a bool
4389 result here anyway. */
4390 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4392 tree result = NULL_TREE;
4393 unsigned i;
4394 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4396 tree arg = gimple_phi_arg_def (stmt, i);
4398 /* If this PHI has itself as an argument, ignore it.
4399 If all the other args produce the same result,
4400 we're still OK. */
4401 if (arg == gimple_phi_result (stmt))
4402 continue;
4403 else if (TREE_CODE (arg) == INTEGER_CST)
4405 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4407 if (!result)
4408 result = boolean_false_node;
4409 else if (!integer_zerop (result))
4410 return NULL_TREE;
4412 else if (!result)
4413 result = fold_build2 (code2, boolean_type_node,
4414 op2a, op2b);
4415 else if (!same_bool_comparison_p (result,
4416 code2, op2a, op2b))
4417 return NULL_TREE;
4419 else if (TREE_CODE (arg) == SSA_NAME
4420 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4422 tree temp;
4423 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4424 /* In simple cases we can look through PHI nodes,
4425 but we have to be careful with loops.
4426 See PR49073. */
4427 if (! dom_info_available_p (CDI_DOMINATORS)
4428 || gimple_bb (def_stmt) == gimple_bb (stmt)
4429 || dominated_by_p (CDI_DOMINATORS,
4430 gimple_bb (def_stmt),
4431 gimple_bb (stmt)))
4432 return NULL_TREE;
4433 temp = and_var_with_comparison (arg, invert, code2,
4434 op2a, op2b);
4435 if (!temp)
4436 return NULL_TREE;
4437 else if (!result)
4438 result = temp;
4439 else if (!same_bool_result_p (result, temp))
4440 return NULL_TREE;
4442 else
4443 return NULL_TREE;
4445 return result;
4448 default:
4449 break;
4452 return NULL_TREE;
4455 /* Try to simplify the AND of two comparisons, specified by
4456 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4457 If this can be simplified to a single expression (without requiring
4458 introducing more SSA variables to hold intermediate values),
4459 return the resulting tree. Otherwise return NULL_TREE.
4460 If the result expression is non-null, it has boolean type. */
4462 tree
4463 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4464 enum tree_code code2, tree op2a, tree op2b)
4466 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4467 if (t)
4468 return t;
4469 else
4470 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4473 /* Helper function for or_comparisons_1: try to simplify the OR of the
4474 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4475 If INVERT is true, invert the value of VAR before doing the OR.
4476 Return NULL_EXPR if we can't simplify this to a single expression. */
4478 static tree
4479 or_var_with_comparison (tree var, bool invert,
4480 enum tree_code code2, tree op2a, tree op2b)
4482 tree t;
4483 gimple *stmt = SSA_NAME_DEF_STMT (var);
4485 /* We can only deal with variables whose definitions are assignments. */
4486 if (!is_gimple_assign (stmt))
4487 return NULL_TREE;
4489 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4490 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4491 Then we only have to consider the simpler non-inverted cases. */
4492 if (invert)
4493 t = and_var_with_comparison_1 (stmt,
4494 invert_tree_comparison (code2, false),
4495 op2a, op2b);
4496 else
4497 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4498 return canonicalize_bool (t, invert);
4501 /* Try to simplify the OR of the ssa variable defined by the assignment
4502 STMT with the comparison specified by (OP2A CODE2 OP2B).
4503 Return NULL_EXPR if we can't simplify this to a single expression. */
4505 static tree
4506 or_var_with_comparison_1 (gimple *stmt,
4507 enum tree_code code2, tree op2a, tree op2b)
4509 tree var = gimple_assign_lhs (stmt);
4510 tree true_test_var = NULL_TREE;
4511 tree false_test_var = NULL_TREE;
4512 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4514 /* Check for identities like (var OR (var != 0)) => true . */
4515 if (TREE_CODE (op2a) == SSA_NAME
4516 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4518 if ((code2 == NE_EXPR && integer_zerop (op2b))
4519 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4521 true_test_var = op2a;
4522 if (var == true_test_var)
4523 return var;
4525 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4526 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4528 false_test_var = op2a;
4529 if (var == false_test_var)
4530 return boolean_true_node;
4534 /* If the definition is a comparison, recurse on it. */
4535 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4537 tree t = or_comparisons_1 (innercode,
4538 gimple_assign_rhs1 (stmt),
4539 gimple_assign_rhs2 (stmt),
4540 code2,
4541 op2a,
4542 op2b);
4543 if (t)
4544 return t;
4547 /* If the definition is an AND or OR expression, we may be able to
4548 simplify by reassociating. */
4549 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4550 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4552 tree inner1 = gimple_assign_rhs1 (stmt);
4553 tree inner2 = gimple_assign_rhs2 (stmt);
4554 gimple *s;
4555 tree t;
4556 tree partial = NULL_TREE;
4557 bool is_or = (innercode == BIT_IOR_EXPR);
4559 /* Check for boolean identities that don't require recursive examination
4560 of inner1/inner2:
4561 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4562 inner1 OR (inner1 AND inner2) => inner1
4563 !inner1 OR (inner1 OR inner2) => true
4564 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2  */
4566 if (inner1 == true_test_var)
4567 return (is_or ? var : inner1);
4568 else if (inner2 == true_test_var)
4569 return (is_or ? var : inner2);
4570 else if (inner1 == false_test_var)
4571 return (is_or
4572 ? boolean_true_node
4573 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4574 else if (inner2 == false_test_var)
4575 return (is_or
4576 ? boolean_true_node
4577 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4579 /* Next, redistribute/reassociate the OR across the inner tests.
4580 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4581 if (TREE_CODE (inner1) == SSA_NAME
4582 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4583 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4584 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4585 gimple_assign_rhs1 (s),
4586 gimple_assign_rhs2 (s),
4587 code2, op2a, op2b)))
4589 /* Handle the OR case, where we are reassociating:
4590 (inner1 OR inner2) OR (op2a code2 op2b)
4591 => (t OR inner2)
4592 If the partial result t is a constant, we win. Otherwise
4593 continue on to try reassociating with the other inner test. */
4594 if (is_or)
4596 if (integer_onep (t))
4597 return boolean_true_node;
4598 else if (integer_zerop (t))
4599 return inner2;
4602 /* Handle the AND case, where we are redistributing:
4603 (inner1 AND inner2) OR (op2a code2 op2b)
4604 => (t AND (inner2 OR (op2a code op2b))) */
4605 else if (integer_zerop (t))
4606 return boolean_false_node;
4608 /* Save partial result for later. */
4609 partial = t;
4612 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4613 if (TREE_CODE (inner2) == SSA_NAME
4614 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4615 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4616 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4617 gimple_assign_rhs1 (s),
4618 gimple_assign_rhs2 (s),
4619 code2, op2a, op2b)))
4621 /* Handle the OR case, where we are reassociating:
4622 (inner1 OR inner2) OR (op2a code2 op2b)
4623 => (inner1 OR t)
4624 => (t OR partial) */
4625 if (is_or)
4627 if (integer_zerop (t))
4628 return inner1;
4629 else if (integer_onep (t))
4630 return boolean_true_node;
4631 /* If both are the same, we can apply the identity
4632 (x OR x) == x. */
4633 else if (partial && same_bool_result_p (t, partial))
4634 return t;
4637 /* Handle the AND case, where we are redistributing:
4638 (inner1 AND inner2) OR (op2a code2 op2b)
4639 => (t AND (inner1 OR (op2a code2 op2b)))
4640 => (t AND partial) */
4641 else
4643 if (integer_zerop (t))
4644 return boolean_false_node;
4645 else if (partial)
4647 /* We already got a simplification for the other
4648 operand to the redistributed AND expression. The
4649 interesting case is when at least one is true.
4650 Or, if both are the same, we can apply the identity
4651 (x AND x) == x. */
4652 if (integer_onep (partial))
4653 return t;
4654 else if (integer_onep (t))
4655 return partial;
4656 else if (same_bool_result_p (t, partial))
4657 return t;
4662 return NULL_TREE;
4665 /* Try to simplify the OR of two comparisons defined by
4666 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4667 If this can be done without constructing an intermediate value,
4668 return the resulting tree; otherwise NULL_TREE is returned.
4669 This function is deliberately asymmetric as it recurses on SSA_DEFs
4670 in the first comparison but not the second. */
4672 static tree
4673 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4674 enum tree_code code2, tree op2a, tree op2b)
4676 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4678 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4679 if (operand_equal_p (op1a, op2a, 0)
4680 && operand_equal_p (op1b, op2b, 0))
4682 /* Result will be either NULL_TREE, or a combined comparison. */
4683 tree t = combine_comparisons (UNKNOWN_LOCATION,
4684 TRUTH_ORIF_EXPR, code1, code2,
4685 truth_type, op1a, op1b);
4686 if (t)
4687 return t;
4690 /* Likewise the swapped case of the above. */
4691 if (operand_equal_p (op1a, op2b, 0)
4692 && operand_equal_p (op1b, op2a, 0))
4694 /* Result will be either NULL_TREE, or a combined comparison. */
4695 tree t = combine_comparisons (UNKNOWN_LOCATION,
4696 TRUTH_ORIF_EXPR, code1,
4697 swap_tree_comparison (code2),
4698 truth_type, op1a, op1b);
4699 if (t)
4700 return t;
4703 /* If both comparisons are of the same value against constants, we might
4704 be able to merge them. */
4705 if (operand_equal_p (op1a, op2a, 0)
4706 && TREE_CODE (op1b) == INTEGER_CST
4707 && TREE_CODE (op2b) == INTEGER_CST)
4709 int cmp = tree_int_cst_compare (op1b, op2b);
4711 /* If we have (op1a != op1b), we should either be able to
4712 return that or TRUE, depending on whether the constant op1b
4713 also satisfies the other comparison against op2b. */
4714 if (code1 == NE_EXPR)
4716 bool done = true;
4717 bool val;
4718 switch (code2)
4720 case EQ_EXPR: val = (cmp == 0); break;
4721 case NE_EXPR: val = (cmp != 0); break;
4722 case LT_EXPR: val = (cmp < 0); break;
4723 case GT_EXPR: val = (cmp > 0); break;
4724 case LE_EXPR: val = (cmp <= 0); break;
4725 case GE_EXPR: val = (cmp >= 0); break;
4726 default: done = false;
4728 if (done)
4730 if (val)
4731 return boolean_true_node;
4732 else
4733 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4736 /* Likewise if the second comparison is a != comparison. */
4737 else if (code2 == NE_EXPR)
4739 bool done = true;
4740 bool val;
4741 switch (code1)
4743 case EQ_EXPR: val = (cmp == 0); break;
4744 case NE_EXPR: val = (cmp != 0); break;
4745 case LT_EXPR: val = (cmp > 0); break;
4746 case GT_EXPR: val = (cmp < 0); break;
4747 case LE_EXPR: val = (cmp >= 0); break;
4748 case GE_EXPR: val = (cmp <= 0); break;
4749 default: done = false;
4751 if (done)
4753 if (val)
4754 return boolean_true_node;
4755 else
4756 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4760 /* See if an equality test is redundant with the other comparison. */
4761 else if (code1 == EQ_EXPR)
4763 bool val;
4764 switch (code2)
4766 case EQ_EXPR: val = (cmp == 0); break;
4767 case NE_EXPR: val = (cmp != 0); break;
4768 case LT_EXPR: val = (cmp < 0); break;
4769 case GT_EXPR: val = (cmp > 0); break;
4770 case LE_EXPR: val = (cmp <= 0); break;
4771 case GE_EXPR: val = (cmp >= 0); break;
4772 default:
4773 val = false;
4775 if (val)
4776 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4778 else if (code2 == EQ_EXPR)
4780 bool val;
4781 switch (code1)
4783 case EQ_EXPR: val = (cmp == 0); break;
4784 case NE_EXPR: val = (cmp != 0); break;
4785 case LT_EXPR: val = (cmp > 0); break;
4786 case GT_EXPR: val = (cmp < 0); break;
4787 case LE_EXPR: val = (cmp >= 0); break;
4788 case GE_EXPR: val = (cmp <= 0); break;
4789 default:
4790 val = false;
4792 if (val)
4793 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4796 /* Choose the less restrictive of two < or <= comparisons. */
4797 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4798 && (code2 == LT_EXPR || code2 == LE_EXPR))
4800 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4801 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4802 else
4803 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4806 /* Likewise choose the less restrictive of two > or >= comparisons. */
4807 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4808 && (code2 == GT_EXPR || code2 == GE_EXPR))
4810 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4811 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4812 else
4813 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4816 /* Check for singleton ranges. */
4817 else if (cmp == 0
4818 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4819 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4820 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4822 /* Check for less/greater pairs that don't restrict the range at all. */
4823 else if (cmp >= 0
4824 && (code1 == LT_EXPR || code1 == LE_EXPR)
4825 && (code2 == GT_EXPR || code2 == GE_EXPR))
4826 return boolean_true_node;
4827 else if (cmp <= 0
4828 && (code1 == GT_EXPR || code1 == GE_EXPR)
4829 && (code2 == LT_EXPR || code2 == LE_EXPR))
4830 return boolean_true_node;
4833 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4834 NAME's definition is a truth value. See if there are any simplifications
4835 that can be done against the NAME's definition. */
4836 if (TREE_CODE (op1a) == SSA_NAME
4837 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4838 && (integer_zerop (op1b) || integer_onep (op1b)))
      /* INVERT is true when (op1a code1 op1b) is equivalent to !op1a,
	 i.e. the first comparison tests that OP1A is false.  */
4840 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4841 || (code1 == NE_EXPR && integer_onep (op1b)));
4842 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4843 switch (gimple_code (stmt))
4845 case GIMPLE_ASSIGN:
4846 /* Try to simplify by copy-propagating the definition. */
4847 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4849 case GIMPLE_PHI:
4850 /* If every argument to the PHI produces the same result when
4851 ORed with the second comparison, we win.
4852 Do not do this unless the type is bool since we need a bool
4853 result here anyway. */
4854 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4856 tree result = NULL_TREE;
4857 unsigned i;
4858 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4860 tree arg = gimple_phi_arg_def (stmt, i);
4862 /* If this PHI has itself as an argument, ignore it.
4863 If all the other args produce the same result,
4864 we're still OK. */
4865 if (arg == gimple_phi_result (stmt))
4866 continue;
4867 else if (TREE_CODE (arg) == INTEGER_CST)
4869 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4871 if (!result)
4872 result = boolean_true_node;
4873 else if (!integer_onep (result))
4874 return NULL_TREE;
4876 else if (!result)
4877 result = fold_build2 (code2, boolean_type_node,
4878 op2a, op2b);
4879 else if (!same_bool_comparison_p (result,
4880 code2, op2a, op2b))
4881 return NULL_TREE;
4883 else if (TREE_CODE (arg) == SSA_NAME
4884 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4886 tree temp;
4887 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4888 /* In simple cases we can look through PHI nodes,
4889 but we have to be careful with loops.
4890 See PR49073. */
4891 if (! dom_info_available_p (CDI_DOMINATORS)
4892 || gimple_bb (def_stmt) == gimple_bb (stmt)
4893 || dominated_by_p (CDI_DOMINATORS,
4894 gimple_bb (def_stmt),
4895 gimple_bb (stmt)))
4896 return NULL_TREE;
4897 temp = or_var_with_comparison (arg, invert, code2,
4898 op2a, op2b);
4899 if (!temp)
4900 return NULL_TREE;
4901 else if (!result)
4902 result = temp;
4903 else if (!same_bool_result_p (result, temp))
4904 return NULL_TREE;
4906 else
4907 return NULL_TREE;
4909 return result;
4912 default:
4913 break;
4916 return NULL_TREE;
4919 /* Try to simplify the OR of two comparisons, specified by
4920 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4921 If this can be simplified to a single expression (without requiring
4922 introducing more SSA variables to hold intermediate values),
4923 return the resulting tree. Otherwise return NULL_TREE.
4924 If the result expression is non-null, it has boolean type. */
4926 tree
4927 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4928 enum tree_code code2, tree op2a, tree op2b)
4930 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4931 if (t)
4932 return t;
4933 else
4934 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4938 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4940 Either NULL_TREE, a simplified but non-constant or a constant
4941 is returned.
4943 ??? This should go into a gimple-fold-inline.h file to be eventually
4944 privatized with the single valueize function used in the various TUs
4945 to avoid the indirect function call overhead. */
4947 tree
4948 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
4949 tree (*gvalueize) (tree))
4951 code_helper rcode;
4952 tree ops[3] = {};
4953 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4954 edges if there are intermediate VARYING defs. For this reason
4955 do not follow SSA edges here even though SCCVN can technically
4956 just deal fine with that. */
4957 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
4959 tree res = NULL_TREE;
4960 if (gimple_simplified_result_is_gimple_val (rcode, ops))
4961 res = ops[0];
4962 else if (mprts_hook)
4963 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
4964 if (res)
4966 if (dump_file && dump_flags & TDF_DETAILS)
4968 fprintf (dump_file, "Match-and-simplified ");
4969 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4970 fprintf (dump_file, " to ");
4971 print_generic_expr (dump_file, res, 0);
4972 fprintf (dump_file, "\n");
4974 return res;
4978 location_t loc = gimple_location (stmt);
4979 switch (gimple_code (stmt))
4981 case GIMPLE_ASSIGN:
4983 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4985 switch (get_gimple_rhs_class (subcode))
4987 case GIMPLE_SINGLE_RHS:
4989 tree rhs = gimple_assign_rhs1 (stmt);
4990 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4992 if (TREE_CODE (rhs) == SSA_NAME)
4994 /* If the RHS is an SSA_NAME, return its known constant value,
4995 if any. */
4996 return (*valueize) (rhs);
4998 /* Handle propagating invariant addresses into address
4999 operations. */
5000 else if (TREE_CODE (rhs) == ADDR_EXPR
5001 && !is_gimple_min_invariant (rhs))
5003 HOST_WIDE_INT offset = 0;
5004 tree base;
5005 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5006 &offset,
5007 valueize);
5008 if (base
5009 && (CONSTANT_CLASS_P (base)
5010 || decl_address_invariant_p (base)))
5011 return build_invariant_address (TREE_TYPE (rhs),
5012 base, offset);
5014 else if (TREE_CODE (rhs) == CONSTRUCTOR
5015 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5016 && (CONSTRUCTOR_NELTS (rhs)
5017 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5019 unsigned i;
5020 tree val, *vec;
5022 vec = XALLOCAVEC (tree,
5023 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5024 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5026 val = (*valueize) (val);
5027 if (TREE_CODE (val) == INTEGER_CST
5028 || TREE_CODE (val) == REAL_CST
5029 || TREE_CODE (val) == FIXED_CST)
5030 vec[i] = val;
5031 else
5032 return NULL_TREE;
5035 return build_vector (TREE_TYPE (rhs), vec);
5037 if (subcode == OBJ_TYPE_REF)
5039 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5040 /* If callee is constant, we can fold away the wrapper. */
5041 if (is_gimple_min_invariant (val))
5042 return val;
5045 if (kind == tcc_reference)
5047 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5048 || TREE_CODE (rhs) == REALPART_EXPR
5049 || TREE_CODE (rhs) == IMAGPART_EXPR)
5050 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5052 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5053 return fold_unary_loc (EXPR_LOCATION (rhs),
5054 TREE_CODE (rhs),
5055 TREE_TYPE (rhs), val);
5057 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5058 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5060 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5061 return fold_ternary_loc (EXPR_LOCATION (rhs),
5062 TREE_CODE (rhs),
5063 TREE_TYPE (rhs), val,
5064 TREE_OPERAND (rhs, 1),
5065 TREE_OPERAND (rhs, 2));
5067 else if (TREE_CODE (rhs) == MEM_REF
5068 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5070 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5071 if (TREE_CODE (val) == ADDR_EXPR
5072 && is_gimple_min_invariant (val))
5074 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5075 unshare_expr (val),
5076 TREE_OPERAND (rhs, 1));
5077 if (tem)
5078 rhs = tem;
5081 return fold_const_aggregate_ref_1 (rhs, valueize);
5083 else if (kind == tcc_declaration)
5084 return get_symbol_constant_value (rhs);
5085 return rhs;
5088 case GIMPLE_UNARY_RHS:
5089 return NULL_TREE;
5091 case GIMPLE_BINARY_RHS:
5092 /* Translate &x + CST into an invariant form suitable for
5093 further propagation. */
5094 if (subcode == POINTER_PLUS_EXPR)
5096 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5097 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5098 if (TREE_CODE (op0) == ADDR_EXPR
5099 && TREE_CODE (op1) == INTEGER_CST)
5101 tree off = fold_convert (ptr_type_node, op1);
5102 return build_fold_addr_expr_loc
5103 (loc,
5104 fold_build2 (MEM_REF,
5105 TREE_TYPE (TREE_TYPE (op0)),
5106 unshare_expr (op0), off));
5109 /* Canonicalize bool != 0 and bool == 0 appearing after
5110 valueization. While gimple_simplify handles this
5111 it can get confused by the ~X == 1 -> X == 0 transform
5112 which we can't reduce to a SSA name or a constant
5113 (and we have no way to tell gimple_simplify to not
5114 consider those transforms in the first place). */
5115 else if (subcode == EQ_EXPR
5116 || subcode == NE_EXPR)
5118 tree lhs = gimple_assign_lhs (stmt);
5119 tree op0 = gimple_assign_rhs1 (stmt);
5120 if (useless_type_conversion_p (TREE_TYPE (lhs),
5121 TREE_TYPE (op0)))
5123 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5124 op0 = (*valueize) (op0);
5125 if (TREE_CODE (op0) == INTEGER_CST)
5126 std::swap (op0, op1);
5127 if (TREE_CODE (op1) == INTEGER_CST
5128 && ((subcode == NE_EXPR && integer_zerop (op1))
5129 || (subcode == EQ_EXPR && integer_onep (op1))))
5130 return op0;
5133 return NULL_TREE;
5135 case GIMPLE_TERNARY_RHS:
5137 /* Handle ternary operators that can appear in GIMPLE form. */
5138 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5139 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5140 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5141 return fold_ternary_loc (loc, subcode,
5142 gimple_expr_type (stmt), op0, op1, op2);
5145 default:
5146 gcc_unreachable ();
5150 case GIMPLE_CALL:
5152 tree fn;
5153 gcall *call_stmt = as_a <gcall *> (stmt);
5155 if (gimple_call_internal_p (stmt))
5157 enum tree_code subcode = ERROR_MARK;
5158 switch (gimple_call_internal_fn (stmt))
5160 case IFN_UBSAN_CHECK_ADD:
5161 subcode = PLUS_EXPR;
5162 break;
5163 case IFN_UBSAN_CHECK_SUB:
5164 subcode = MINUS_EXPR;
5165 break;
5166 case IFN_UBSAN_CHECK_MUL:
5167 subcode = MULT_EXPR;
5168 break;
5169 default:
5170 return NULL_TREE;
5172 tree arg0 = gimple_call_arg (stmt, 0);
5173 tree arg1 = gimple_call_arg (stmt, 1);
5174 tree op0 = (*valueize) (arg0);
5175 tree op1 = (*valueize) (arg1);
5177 if (TREE_CODE (op0) != INTEGER_CST
5178 || TREE_CODE (op1) != INTEGER_CST)
	    /* With a non-constant operand, only a few algebraic
	       identities (which cannot overflow) still fold.  */
5180 switch (subcode)
5182 case MULT_EXPR:
5183 /* x * 0 = 0 * x = 0 without overflow. */
5184 if (integer_zerop (op0) || integer_zerop (op1))
5185 return build_zero_cst (TREE_TYPE (arg0));
5186 break;
5187 case MINUS_EXPR:
5188 /* y - y = 0 without overflow. */
5189 if (operand_equal_p (op0, op1, 0))
5190 return build_zero_cst (TREE_TYPE (arg0));
5191 break;
5192 default:
5193 break;
5196 tree res
5197 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5198 if (res
5199 && TREE_CODE (res) == INTEGER_CST
5200 && !TREE_OVERFLOW (res))
5201 return res;
5202 return NULL_TREE;
5205 fn = (*valueize) (gimple_call_fn (stmt));
5206 if (TREE_CODE (fn) == ADDR_EXPR
5207 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5208 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5209 && gimple_builtin_call_types_compatible_p (stmt,
5210 TREE_OPERAND (fn, 0)))
5212 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5213 tree retval;
5214 unsigned i;
5215 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5216 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5217 retval = fold_builtin_call_array (loc,
5218 gimple_call_return_type (call_stmt),
5219 fn, gimple_call_num_args (stmt), args);
5220 if (retval)
5222 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5223 STRIP_NOPS (retval);
5224 retval = fold_convert (gimple_call_return_type (call_stmt),
5225 retval);
5227 return retval;
5229 return NULL_TREE;
5232 default:
5233 return NULL_TREE;
5237 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5238 Returns NULL_TREE if folding to a constant is not possible, otherwise
5239 returns a constant according to is_gimple_min_invariant. */
5241 tree
5242 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5244 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5245 if (res && is_gimple_min_invariant (res))
5246 return res;
5247 return NULL_TREE;
5251 /* The following set of functions are supposed to fold references using
5252 their constant initializers. */
5254 /* See if we can find constructor defining value of BASE.
5255 When we know the constructor with constant offset (such as
5256 base is array[40] and we do know constructor of array), then
5257 BIT_OFFSET is adjusted accordingly.
5259 As a special case, return error_mark_node when constructor
5260 is not explicitly available, but it is known to be zero
5261 such as 'static const int a;'. */
5262 static tree
5263 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5264 tree (*valueize)(tree))
5266 HOST_WIDE_INT bit_offset2, size, max_size;
5267 if (TREE_CODE (base) == MEM_REF)
      /* Fold a constant MEM_REF offset into BIT_OFFSET, then look
	 through the address to the underlying decl.  */
5269 if (!integer_zerop (TREE_OPERAND (base, 1)))
5271 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5272 return NULL_TREE;
5273 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5274 * BITS_PER_UNIT);
5277 if (valueize
5278 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5279 base = valueize (TREE_OPERAND (base, 0));
5280 if (!base || TREE_CODE (base) != ADDR_EXPR)
5281 return NULL_TREE;
5282 base = TREE_OPERAND (base, 0);
5285 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5286 DECL_INITIAL. If BASE is a nested reference into another
5287 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5288 the inner reference. */
5289 switch (TREE_CODE (base))
5291 case VAR_DECL:
5292 case CONST_DECL:
5294 tree init = ctor_for_folding (base);
5296 /* Our semantic is exact opposite of ctor_for_folding;
5297 NULL means unknown, while error_mark_node is 0. */
5298 if (init == error_mark_node)
5299 return NULL_TREE;
5300 if (!init)
5301 return error_mark_node;
5302 return init;
5305 case ARRAY_REF:
5306 case COMPONENT_REF:
5307 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
5308 if (max_size == -1 || size != max_size)
5309 return NULL_TREE;
5310 *bit_offset += bit_offset2;
5311 return get_base_constructor (base, bit_offset, valueize);
5313 case STRING_CST:
5314 case CONSTRUCTOR:
5315 return base;
5317 default:
5318 return NULL_TREE;
5322 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5323 SIZE to the memory at bit OFFSET. */
5325 static tree
5326 fold_array_ctor_reference (tree type, tree ctor,
5327 unsigned HOST_WIDE_INT offset,
5328 unsigned HOST_WIDE_INT size,
5329 tree from_decl)
5331 offset_int low_bound;
5332 offset_int elt_size;
5333 offset_int access_index;
5334 tree domain_type = NULL_TREE;
5335 HOST_WIDE_INT inner_offset;
5337 /* Compute low bound and elt size. */
5338 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5339 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5340 if (domain_type && TYPE_MIN_VALUE (domain_type))
5342 /* Static constructors for variably sized objects makes no sense. */
5343 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
5344 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5346 else
5347 low_bound = 0;
5348 /* Static constructors for variably sized objects makes no sense. */
5349 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
5350 == INTEGER_CST);
5351 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5353 /* We can handle only constantly sized accesses that are known to not
5354 be larger than size of array element. */
5355 if (!TYPE_SIZE_UNIT (type)
5356 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5357 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
5358 || elt_size == 0)
5359 return NULL_TREE;
5361 /* Compute the array index we look for. */
5362 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5363 elt_size);
5364 access_index += low_bound;
5366 /* And offset within the access. */
5367 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5369 /* See if the array field is large enough to span whole access. We do not
5370 care to fold accesses spanning multiple array indexes. */
5371 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5372 return NULL_TREE;
5373 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
5374 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
5376 /* When memory is not explicitely mentioned in constructor,
5377 it is 0 (or out of range). */
5378 return build_zero_cst (type);
5381 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5382 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5384 static tree
5385 fold_nonarray_ctor_reference (tree type, tree ctor,
5386 unsigned HOST_WIDE_INT offset,
5387 unsigned HOST_WIDE_INT size,
5388 tree from_decl)
5390 unsigned HOST_WIDE_INT cnt;
5391 tree cfield, cval;
5393 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5394 cval)
5396 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5397 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5398 tree field_size = DECL_SIZE (cfield);
5399 offset_int bitoffset;
5400 offset_int bitoffset_end, access_end;
5402 /* Variable sized objects in static constructors makes no sense,
5403 but field_size can be NULL for flexible array members. */
5404 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5405 && TREE_CODE (byte_offset) == INTEGER_CST
5406 && (field_size != NULL_TREE
5407 ? TREE_CODE (field_size) == INTEGER_CST
5408 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5410 /* Compute bit offset of the field. */
5411 bitoffset = (wi::to_offset (field_offset)
5412 + wi::lshift (wi::to_offset (byte_offset),
5413 LOG2_BITS_PER_UNIT));
5414 /* Compute bit offset where the field ends. */
5415 if (field_size != NULL_TREE)
5416 bitoffset_end = bitoffset + wi::to_offset (field_size);
5417 else
5418 bitoffset_end = 0;
5420 access_end = offset_int (offset) + size;
5422 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5423 [BITOFFSET, BITOFFSET_END)? */
5424 if (wi::cmps (access_end, bitoffset) > 0
5425 && (field_size == NULL_TREE
5426 || wi::lts_p (offset, bitoffset_end)))
5428 offset_int inner_offset = offset_int (offset) - bitoffset;
5429 /* We do have overlap. Now see if field is large enough to
5430 cover the access. Give up for accesses spanning multiple
5431 fields. */
5432 if (wi::cmps (access_end, bitoffset_end) > 0)
5433 return NULL_TREE;
5434 if (wi::lts_p (offset, bitoffset))
5435 return NULL_TREE;
5436 return fold_ctor_reference (type, cval,
5437 inner_offset.to_uhwi (), size,
5438 from_decl);
5441 /* When memory is not explicitely mentioned in constructor, it is 0. */
5442 return build_zero_cst (type);
5445 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5446 to the memory at bit OFFSET. */
5448 tree
5449 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5450 unsigned HOST_WIDE_INT size, tree from_decl)
5452 tree ret;
5454 /* We found the field with exact match. */
5455 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5456 && !offset)
5457 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5459 /* We are at the end of walk, see if we can view convert the
5460 result. */
5461 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5462 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5463 && !compare_tree_int (TYPE_SIZE (type), size)
5464 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5466 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5467 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5468 if (ret)
5469 STRIP_USELESS_TYPE_CONVERSION (ret);
5470 return ret;
5472 /* For constants and byte-aligned/sized reads try to go through
5473 native_encode/interpret. */
5474 if (CONSTANT_CLASS_P (ctor)
5475 && BITS_PER_UNIT == 8
5476 && offset % BITS_PER_UNIT == 0
5477 && size % BITS_PER_UNIT == 0
5478 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5480 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5481 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5482 offset / BITS_PER_UNIT) > 0)
5483 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
5485 if (TREE_CODE (ctor) == CONSTRUCTOR)
5488 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5489 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5490 return fold_array_ctor_reference (type, ctor, offset, size,
5491 from_decl);
5492 else
5493 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5494 from_decl);
5497 return NULL_TREE;
5500 /* Return the tree representing the element referenced by T if T is an
5501 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5502 names using VALUEIZE. Return NULL_TREE otherwise. */
5504 tree
5505 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5507 tree ctor, idx, base;
5508 HOST_WIDE_INT offset, size, max_size;
5509 tree tem;
5511 if (TREE_THIS_VOLATILE (t))
5512 return NULL_TREE;
5514 if (DECL_P (t))
5515 return get_symbol_constant_value (t);
5517 tem = fold_read_from_constant_string (t);
5518 if (tem)
5519 return tem;
5521 switch (TREE_CODE (t))
5523 case ARRAY_REF:
5524 case ARRAY_RANGE_REF:
5525 /* Constant indexes are handled well by get_base_constructor.
5526 Only special case variable offsets.
5527 FIXME: This code can't handle nested references with variable indexes
5528 (they will be handled only by iteration of ccp). Perhaps we can bring
5529 get_ref_base_and_extent here and make it use a valueize callback. */
5530 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5531 && valueize
5532 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5533 && TREE_CODE (idx) == INTEGER_CST)
5535 tree low_bound, unit_size;
5537 /* If the resulting bit-offset is constant, track it. */
5538 if ((low_bound = array_ref_low_bound (t),
5539 TREE_CODE (low_bound) == INTEGER_CST)
5540 && (unit_size = array_ref_element_size (t),
5541 tree_fits_uhwi_p (unit_size)))
5543 offset_int woffset
5544 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5545 TYPE_PRECISION (TREE_TYPE (idx)));
5547 if (wi::fits_shwi_p (woffset))
5549 offset = woffset.to_shwi ();
5550 /* TODO: This code seems wrong, multiply then check
5551 to see if it fits. */
5552 offset *= tree_to_uhwi (unit_size);
5553 offset *= BITS_PER_UNIT;
5555 base = TREE_OPERAND (t, 0);
5556 ctor = get_base_constructor (base, &offset, valueize);
5557 /* Empty constructor. Always fold to 0. */
5558 if (ctor == error_mark_node)
5559 return build_zero_cst (TREE_TYPE (t));
5560 /* Out of bound array access. Value is undefined,
5561 but don't fold. */
5562 if (offset < 0)
5563 return NULL_TREE;
5564 /* We can not determine ctor. */
5565 if (!ctor)
5566 return NULL_TREE;
5567 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5568 tree_to_uhwi (unit_size)
5569 * BITS_PER_UNIT,
5570 base);
5574 /* Fallthru. */
5576 case COMPONENT_REF:
5577 case BIT_FIELD_REF:
5578 case TARGET_MEM_REF:
5579 case MEM_REF:
5580 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
5581 ctor = get_base_constructor (base, &offset, valueize);
5583 /* Empty constructor. Always fold to 0. */
5584 if (ctor == error_mark_node)
5585 return build_zero_cst (TREE_TYPE (t));
5586 /* We do not know precise address. */
5587 if (max_size == -1 || max_size != size)
5588 return NULL_TREE;
5589 /* We can not determine ctor. */
5590 if (!ctor)
5591 return NULL_TREE;
5593 /* Out of bound array access. Value is undefined, but don't fold. */
5594 if (offset < 0)
5595 return NULL_TREE;
5597 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5598 base);
5600 case REALPART_EXPR:
5601 case IMAGPART_EXPR:
5603 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5604 if (c && TREE_CODE (c) == COMPLEX_CST)
5605 return fold_build1_loc (EXPR_LOCATION (t),
5606 TREE_CODE (t), TREE_TYPE (t), c);
5607 break;
5610 default:
5611 break;
5614 return NULL_TREE;
5617 tree
5618 fold_const_aggregate_ref (tree t)
5620 return fold_const_aggregate_ref_1 (t, NULL);
5623 /* Lookup virtual method with index TOKEN in a virtual table V
5624 at OFFSET.
5625 Set CAN_REFER if non-NULL to false if method
5626 is not referable or if the virtual table is ill-formed (such as rewriten
5627 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5629 tree
5630 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5631 tree v,
5632 unsigned HOST_WIDE_INT offset,
5633 bool *can_refer)
5635 tree vtable = v, init, fn;
5636 unsigned HOST_WIDE_INT size;
5637 unsigned HOST_WIDE_INT elt_size, access_index;
5638 tree domain_type;
5640 if (can_refer)
5641 *can_refer = true;
5643 /* First of all double check we have virtual table. */
5644 if (TREE_CODE (v) != VAR_DECL
5645 || !DECL_VIRTUAL_P (v))
5647 /* Pass down that we lost track of the target. */
5648 if (can_refer)
5649 *can_refer = false;
5650 return NULL_TREE;
5653 init = ctor_for_folding (v);
5655 /* The virtual tables should always be born with constructors
5656 and we always should assume that they are avaialble for
5657 folding. At the moment we do not stream them in all cases,
5658 but it should never happen that ctor seem unreachable. */
5659 gcc_assert (init);
5660 if (init == error_mark_node)
5662 gcc_assert (in_lto_p);
5663 /* Pass down that we lost track of the target. */
5664 if (can_refer)
5665 *can_refer = false;
5666 return NULL_TREE;
5668 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5669 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5670 offset *= BITS_PER_UNIT;
5671 offset += token * size;
5673 /* Lookup the value in the constructor that is assumed to be array.
5674 This is equivalent to
5675 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5676 offset, size, NULL);
5677 but in a constant time. We expect that frontend produced a simple
5678 array without indexed initializers. */
5680 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5681 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5682 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5683 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5685 access_index = offset / BITS_PER_UNIT / elt_size;
5686 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5688 /* This code makes an assumption that there are no
5689 indexed fileds produced by C++ FE, so we can directly index the array. */
5690 if (access_index < CONSTRUCTOR_NELTS (init))
5692 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5693 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5694 STRIP_NOPS (fn);
5696 else
5697 fn = NULL;
5699 /* For type inconsistent program we may end up looking up virtual method
5700 in virtual table that does not contain TOKEN entries. We may overrun
5701 the virtual table and pick up a constant or RTTI info pointer.
5702 In any case the call is undefined. */
5703 if (!fn
5704 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5705 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5706 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5707 else
5709 fn = TREE_OPERAND (fn, 0);
5711 /* When cgraph node is missing and function is not public, we cannot
5712 devirtualize. This can happen in WHOPR when the actual method
5713 ends up in other partition, because we found devirtualization
5714 possibility too late. */
5715 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5717 if (can_refer)
5719 *can_refer = false;
5720 return fn;
5722 return NULL_TREE;
5726 /* Make sure we create a cgraph node for functions we'll reference.
5727 They can be non-existent if the reference comes from an entry
5728 of an external vtable for example. */
5729 cgraph_node::get_create (fn);
5731 return fn;
5734 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5735 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5736 KNOWN_BINFO carries the binfo describing the true type of
5737 OBJ_TYPE_REF_OBJECT(REF).
5738 Set CAN_REFER if non-NULL to false if method
5739 is not referable or if the virtual table is ill-formed (such as rewriten
5740 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5742 tree
5743 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5744 bool *can_refer)
5746 unsigned HOST_WIDE_INT offset;
5747 tree v;
5749 v = BINFO_VTABLE (known_binfo);
5750 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5751 if (!v)
5752 return NULL_TREE;
5754 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5756 if (can_refer)
5757 *can_refer = false;
5758 return NULL_TREE;
5760 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5763 /* Given a pointer value OP0, return a simplified version of an
5764 indirection through OP0, or NULL_TREE if no simplification is
5765 possible. Note that the resulting type may be different from
5766 the type pointed to in the sense that it is still compatible
5767 from the langhooks point of view. */
5769 tree
5770 gimple_fold_indirect_ref (tree t)
5772 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5773 tree sub = t;
5774 tree subtype;
5776 STRIP_NOPS (sub);
5777 subtype = TREE_TYPE (sub);
5778 if (!POINTER_TYPE_P (subtype))
5779 return NULL_TREE;
5781 if (TREE_CODE (sub) == ADDR_EXPR)
5783 tree op = TREE_OPERAND (sub, 0);
5784 tree optype = TREE_TYPE (op);
5785 /* *&p => p */
5786 if (useless_type_conversion_p (type, optype))
5787 return op;
5789 /* *(foo *)&fooarray => fooarray[0] */
5790 if (TREE_CODE (optype) == ARRAY_TYPE
5791 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5792 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5794 tree type_domain = TYPE_DOMAIN (optype);
5795 tree min_val = size_zero_node;
5796 if (type_domain && TYPE_MIN_VALUE (type_domain))
5797 min_val = TYPE_MIN_VALUE (type_domain);
5798 if (TREE_CODE (min_val) == INTEGER_CST)
5799 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5801 /* *(foo *)&complexfoo => __real__ complexfoo */
5802 else if (TREE_CODE (optype) == COMPLEX_TYPE
5803 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5804 return fold_build1 (REALPART_EXPR, type, op);
5805 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5806 else if (TREE_CODE (optype) == VECTOR_TYPE
5807 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5809 tree part_width = TYPE_SIZE (type);
5810 tree index = bitsize_int (0);
5811 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5815 /* *(p + CST) -> ... */
5816 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5817 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5819 tree addr = TREE_OPERAND (sub, 0);
5820 tree off = TREE_OPERAND (sub, 1);
5821 tree addrtype;
5823 STRIP_NOPS (addr);
5824 addrtype = TREE_TYPE (addr);
5826 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5827 if (TREE_CODE (addr) == ADDR_EXPR
5828 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5829 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5830 && tree_fits_uhwi_p (off))
5832 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5833 tree part_width = TYPE_SIZE (type);
5834 unsigned HOST_WIDE_INT part_widthi
5835 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5836 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5837 tree index = bitsize_int (indexi);
5838 if (offset / part_widthi
5839 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5840 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5841 part_width, index);
5844 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5845 if (TREE_CODE (addr) == ADDR_EXPR
5846 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5847 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5849 tree size = TYPE_SIZE_UNIT (type);
5850 if (tree_int_cst_equal (size, off))
5851 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5854 /* *(p + CST) -> MEM_REF <p, CST>. */
5855 if (TREE_CODE (addr) != ADDR_EXPR
5856 || DECL_P (TREE_OPERAND (addr, 0)))
5857 return fold_build2 (MEM_REF, type,
5858 addr,
5859 wide_int_to_tree (ptype, off));
5862 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5863 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5864 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5865 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5867 tree type_domain;
5868 tree min_val = size_zero_node;
5869 tree osub = sub;
5870 sub = gimple_fold_indirect_ref (sub);
5871 if (! sub)
5872 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5873 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5874 if (type_domain && TYPE_MIN_VALUE (type_domain))
5875 min_val = TYPE_MIN_VALUE (type_domain);
5876 if (TREE_CODE (min_val) == INTEGER_CST)
5877 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5880 return NULL_TREE;
5883 /* Return true if CODE is an operation that when operating on signed
5884 integer types involves undefined behavior on overflow and the
5885 operation can be expressed with unsigned arithmetic. */
5887 bool
5888 arith_code_with_undefined_signed_overflow (tree_code code)
5890 switch (code)
5892 case PLUS_EXPR:
5893 case MINUS_EXPR:
5894 case MULT_EXPR:
5895 case NEGATE_EXPR:
5896 case POINTER_PLUS_EXPR:
5897 return true;
5898 default:
5899 return false;
5903 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5904 operation that can be transformed to unsigned arithmetic by converting
5905 its operand, carrying out the operation in the corresponding unsigned
5906 type and converting the result back to the original type.
5908 Returns a sequence of statements that replace STMT and also contain
5909 a modified form of STMT itself. */
5911 gimple_seq
5912 rewrite_to_defined_overflow (gimple *stmt)
5914 if (dump_file && (dump_flags & TDF_DETAILS))
5916 fprintf (dump_file, "rewriting stmt with undefined signed "
5917 "overflow ");
5918 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5921 tree lhs = gimple_assign_lhs (stmt);
5922 tree type = unsigned_type_for (TREE_TYPE (lhs));
5923 gimple_seq stmts = NULL;
5924 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
5926 tree op = gimple_op (stmt, i);
5927 op = gimple_convert (&stmts, type, op);
5928 gimple_set_op (stmt, i, op);
5930 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
5931 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
5932 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
5933 gimple_seq_add_stmt (&stmts, stmt);
5934 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
5935 gimple_seq_add_stmt (&stmts, cvt);
5937 return stmts;
5941 /* The valueization hook we use for the gimple_build API simplification.
5942 This makes us match fold_buildN behavior by only combining with
5943 statements in the sequence(s) we are currently building. */
5945 static tree
5946 gimple_build_valueize (tree op)
5948 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
5949 return op;
5950 return NULL_TREE;
5953 /* Build the expression CODE OP0 of type TYPE with location LOC,
5954 simplifying it first if possible. Returns the built
5955 expression value and appends statements possibly defining it
5956 to SEQ. */
5958 tree
5959 gimple_build (gimple_seq *seq, location_t loc,
5960 enum tree_code code, tree type, tree op0)
5962 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
5963 if (!res)
5965 if (gimple_in_ssa_p (cfun))
5966 res = make_ssa_name (type);
5967 else
5968 res = create_tmp_reg (type);
5969 gimple *stmt;
5970 if (code == REALPART_EXPR
5971 || code == IMAGPART_EXPR
5972 || code == VIEW_CONVERT_EXPR)
5973 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
5974 else
5975 stmt = gimple_build_assign (res, code, op0);
5976 gimple_set_location (stmt, loc);
5977 gimple_seq_add_stmt_without_update (seq, stmt);
5979 return res;
5982 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
5983 simplifying it first if possible. Returns the built
5984 expression value and appends statements possibly defining it
5985 to SEQ. */
5987 tree
5988 gimple_build (gimple_seq *seq, location_t loc,
5989 enum tree_code code, tree type, tree op0, tree op1)
5991 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
5992 if (!res)
5994 if (gimple_in_ssa_p (cfun))
5995 res = make_ssa_name (type);
5996 else
5997 res = create_tmp_reg (type);
5998 gimple *stmt = gimple_build_assign (res, code, op0, op1);
5999 gimple_set_location (stmt, loc);
6000 gimple_seq_add_stmt_without_update (seq, stmt);
6002 return res;
6005 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6006 simplifying it first if possible. Returns the built
6007 expression value and appends statements possibly defining it
6008 to SEQ. */
6010 tree
6011 gimple_build (gimple_seq *seq, location_t loc,
6012 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6014 tree res = gimple_simplify (code, type, op0, op1, op2,
6015 seq, gimple_build_valueize);
6016 if (!res)
6018 if (gimple_in_ssa_p (cfun))
6019 res = make_ssa_name (type);
6020 else
6021 res = create_tmp_reg (type);
6022 gimple *stmt;
6023 if (code == BIT_FIELD_REF)
6024 stmt = gimple_build_assign (res, code,
6025 build3 (code, type, op0, op1, op2));
6026 else
6027 stmt = gimple_build_assign (res, code, op0, op1, op2);
6028 gimple_set_location (stmt, loc);
6029 gimple_seq_add_stmt_without_update (seq, stmt);
6031 return res;
6034 /* Build the call FN (ARG0) with a result of type TYPE
6035 (or no result if TYPE is void) with location LOC,
6036 simplifying it first if possible. Returns the built
6037 expression value (or NULL_TREE if TYPE is void) and appends
6038 statements possibly defining it to SEQ. */
6040 tree
6041 gimple_build (gimple_seq *seq, location_t loc,
6042 enum built_in_function fn, tree type, tree arg0)
6044 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6045 if (!res)
6047 tree decl = builtin_decl_implicit (fn);
6048 gimple *stmt = gimple_build_call (decl, 1, arg0);
6049 if (!VOID_TYPE_P (type))
6051 if (gimple_in_ssa_p (cfun))
6052 res = make_ssa_name (type);
6053 else
6054 res = create_tmp_reg (type);
6055 gimple_call_set_lhs (stmt, res);
6057 gimple_set_location (stmt, loc);
6058 gimple_seq_add_stmt_without_update (seq, stmt);
6060 return res;
6063 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6064 (or no result if TYPE is void) with location LOC,
6065 simplifying it first if possible. Returns the built
6066 expression value (or NULL_TREE if TYPE is void) and appends
6067 statements possibly defining it to SEQ. */
6069 tree
6070 gimple_build (gimple_seq *seq, location_t loc,
6071 enum built_in_function fn, tree type, tree arg0, tree arg1)
6073 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6074 if (!res)
6076 tree decl = builtin_decl_implicit (fn);
6077 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6078 if (!VOID_TYPE_P (type))
6080 if (gimple_in_ssa_p (cfun))
6081 res = make_ssa_name (type);
6082 else
6083 res = create_tmp_reg (type);
6084 gimple_call_set_lhs (stmt, res);
6086 gimple_set_location (stmt, loc);
6087 gimple_seq_add_stmt_without_update (seq, stmt);
6089 return res;
6092 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6093 (or no result if TYPE is void) with location LOC,
6094 simplifying it first if possible. Returns the built
6095 expression value (or NULL_TREE if TYPE is void) and appends
6096 statements possibly defining it to SEQ. */
6098 tree
6099 gimple_build (gimple_seq *seq, location_t loc,
6100 enum built_in_function fn, tree type,
6101 tree arg0, tree arg1, tree arg2)
6103 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6104 seq, gimple_build_valueize);
6105 if (!res)
6107 tree decl = builtin_decl_implicit (fn);
6108 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6109 if (!VOID_TYPE_P (type))
6111 if (gimple_in_ssa_p (cfun))
6112 res = make_ssa_name (type);
6113 else
6114 res = create_tmp_reg (type);
6115 gimple_call_set_lhs (stmt, res);
6117 gimple_set_location (stmt, loc);
6118 gimple_seq_add_stmt_without_update (seq, stmt);
6120 return res;
6123 /* Build the conversion (TYPE) OP with a result of type TYPE
6124 with location LOC if such conversion is neccesary in GIMPLE,
6125 simplifying it first.
6126 Returns the built expression value and appends
6127 statements possibly defining it to SEQ. */
6129 tree
6130 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6132 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6133 return op;
6134 return gimple_build (seq, loc, NOP_EXPR, type, op);
6137 /* Build the conversion (ptrofftype) OP with a result of a type
6138 compatible with ptrofftype with location LOC if such conversion
6139 is neccesary in GIMPLE, simplifying it first.
6140 Returns the built expression value and appends
6141 statements possibly defining it to SEQ. */
6143 tree
6144 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6146 if (ptrofftype_p (TREE_TYPE (op)))
6147 return op;
6148 return gimple_convert (seq, loc, sizetype, op);
6151 /* Return true if the result of assignment STMT is known to be non-negative.
6152 If the return value is based on the assumption that signed overflow is
6153 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6154 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6156 static bool
6157 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6158 int depth)
6160 enum tree_code code = gimple_assign_rhs_code (stmt);
6161 switch (get_gimple_rhs_class (code))
6163 case GIMPLE_UNARY_RHS:
6164 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6165 gimple_expr_type (stmt),
6166 gimple_assign_rhs1 (stmt),
6167 strict_overflow_p, depth);
6168 case GIMPLE_BINARY_RHS:
6169 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6170 gimple_expr_type (stmt),
6171 gimple_assign_rhs1 (stmt),
6172 gimple_assign_rhs2 (stmt),
6173 strict_overflow_p, depth);
6174 case GIMPLE_TERNARY_RHS:
6175 return false;
6176 case GIMPLE_SINGLE_RHS:
6177 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6178 strict_overflow_p, depth);
6179 case GIMPLE_INVALID_RHS:
6180 break;
6182 gcc_unreachable ();
6185 /* Return true if return value of call STMT is known to be non-negative.
6186 If the return value is based on the assumption that signed overflow is
6187 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6188 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6190 static bool
6191 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6192 int depth)
6194 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6195 gimple_call_arg (stmt, 0) : NULL_TREE;
6196 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6197 gimple_call_arg (stmt, 1) : NULL_TREE;
6199 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6200 gimple_call_fndecl (stmt),
6201 arg0,
6202 arg1,
6203 strict_overflow_p, depth);
6206 /* Return true if return value of call STMT is known to be non-negative.
6207 If the return value is based on the assumption that signed overflow is
6208 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6209 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6211 static bool
6212 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6213 int depth)
6215 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6217 tree arg = gimple_phi_arg_def (stmt, i);
6218 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6219 return false;
6221 return true;
6224 /* Return true if STMT is known to compute a non-negative value.
6225 If the return value is based on the assumption that signed overflow is
6226 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6227 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6229 bool
6230 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6231 int depth)
6233 switch (gimple_code (stmt))
6235 case GIMPLE_ASSIGN:
6236 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6237 depth);
6238 case GIMPLE_CALL:
6239 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6240 depth);
6241 case GIMPLE_PHI:
6242 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6243 depth);
6244 default:
6245 return false;
6249 /* Return true if the floating-point value computed by assignment STMT
6250 is known to have an integer value. We also allow +Inf, -Inf and NaN
6251 to be considered integer values.
6253 DEPTH is the current nesting depth of the query. */
6255 static bool
6256 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6258 enum tree_code code = gimple_assign_rhs_code (stmt);
6259 switch (get_gimple_rhs_class (code))
6261 case GIMPLE_UNARY_RHS:
6262 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6263 gimple_assign_rhs1 (stmt), depth);
6264 case GIMPLE_BINARY_RHS:
6265 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6266 gimple_assign_rhs1 (stmt),
6267 gimple_assign_rhs2 (stmt), depth);
6268 case GIMPLE_TERNARY_RHS:
6269 return false;
6270 case GIMPLE_SINGLE_RHS:
6271 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6272 case GIMPLE_INVALID_RHS:
6273 break;
6275 gcc_unreachable ();
6278 /* Return true if the floating-point value computed by call STMT is known
6279 to have an integer value. We also allow +Inf, -Inf and NaN to be
6280 considered integer values.
6282 DEPTH is the current nesting depth of the query. */
6284 static bool
6285 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6287 tree arg0 = (gimple_call_num_args (stmt) > 0
6288 ? gimple_call_arg (stmt, 0)
6289 : NULL_TREE);
6290 tree arg1 = (gimple_call_num_args (stmt) > 1
6291 ? gimple_call_arg (stmt, 1)
6292 : NULL_TREE);
6293 return integer_valued_real_call_p (gimple_call_fndecl (stmt),
6294 arg0, arg1, depth);
6297 /* Return true if the floating-point result of phi STMT is known to have
6298 an integer value. We also allow +Inf, -Inf and NaN to be considered
6299 integer values.
6301 DEPTH is the current nesting depth of the query. */
6303 static bool
6304 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6306 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6308 tree arg = gimple_phi_arg_def (stmt, i);
6309 if (!integer_valued_real_single_p (arg, depth + 1))
6310 return false;
6312 return true;
6315 /* Return true if the floating-point value computed by STMT is known
6316 to have an integer value. We also allow +Inf, -Inf and NaN to be
6317 considered integer values.
6319 DEPTH is the current nesting depth of the query. */
6321 bool
6322 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6324 switch (gimple_code (stmt))
6326 case GIMPLE_ASSIGN:
6327 return gimple_assign_integer_valued_real_p (stmt, depth);
6328 case GIMPLE_CALL:
6329 return gimple_call_integer_valued_real_p (stmt, depth);
6330 case GIMPLE_PHI:
6331 return gimple_phi_integer_valued_real_p (stmt, depth);
6332 default:
6333 return false;