[RTL-ifcvt] PR rtl-optimization/68506: Fix emitting order of insns in IF-THEN-JOIN...
[official-gcc.git] / gcc / gimple-fold.c
blob6ff5e266d420a94b80cf1db12d45784b3fc26214
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stmt.h"
35 #include "expr.h"
36 #include "stor-layout.h"
37 #include "dumpfile.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
49 #include "dbgcnt.h"
50 #include "builtins.h"
51 #include "tree-eh.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
55 #include "omp-low.h"
56 #include "ipa-chkp.h"
59 /* Return true when DECL can be referenced from current unit.
60 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
61 We can get declarations that are not possible to reference for various
62 reasons:
64 1) When analyzing C++ virtual tables.
65 C++ virtual tables do have known constructors even
66 when they are keyed to other compilation unit.
67 Those tables can contain pointers to methods and vars
68 in other units. Those methods have both STATIC and EXTERNAL
69 set.
70 2) In WHOPR mode devirtualization might lead to reference
71 to method that was partitioned elsehwere.
72 In this case we have static VAR_DECL or FUNCTION_DECL
73 that has no corresponding callgraph/varpool node
74 declaring the body.
75 3) COMDAT functions referred by external vtables that
76 we devirtualize only during final compilation stage.
77 At this time we already decided that we will not output
78 the function body and thus we can't reference the symbol
79 directly. */
81 static bool
82 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
84 varpool_node *vnode;
85 struct cgraph_node *node;
86 symtab_node *snode;
88 if (DECL_ABSTRACT_P (decl))
89 return false;
91 /* We are concerned only about static/external vars and functions. */
92 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
93 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
94 return true;
96 /* Static objects can be referred only if they was not optimized out yet. */
97 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
99 /* Before we start optimizing unreachable code we can be sure all
100 static objects are defined. */
101 if (symtab->function_flags_ready)
102 return true;
103 snode = symtab_node::get (decl);
104 if (!snode || !snode->definition)
105 return false;
106 node = dyn_cast <cgraph_node *> (snode);
107 return !node || !node->global.inlined_to;
110 /* We will later output the initializer, so we can refer to it.
111 So we are concerned only when DECL comes from initializer of
112 external var or var that has been optimized out. */
113 if (!from_decl
114 || TREE_CODE (from_decl) != VAR_DECL
115 || (!DECL_EXTERNAL (from_decl)
116 && (vnode = varpool_node::get (from_decl)) != NULL
117 && vnode->definition)
118 || (flag_ltrans
119 && (vnode = varpool_node::get (from_decl)) != NULL
120 && vnode->in_other_partition))
121 return true;
122 /* We are folding reference from external vtable. The vtable may reffer
123 to a symbol keyed to other compilation unit. The other compilation
124 unit may be in separate DSO and the symbol may be hidden. */
125 if (DECL_VISIBILITY_SPECIFIED (decl)
126 && DECL_EXTERNAL (decl)
127 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
128 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
129 return false;
130 /* When function is public, we always can introduce new reference.
131 Exception are the COMDAT functions where introducing a direct
132 reference imply need to include function body in the curren tunit. */
133 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
134 return true;
135 /* We have COMDAT. We are going to check if we still have definition
136 or if the definition is going to be output in other partition.
137 Bypass this when gimplifying; all needed functions will be produced.
139 As observed in PR20991 for already optimized out comdat virtual functions
140 it may be tempting to not necessarily give up because the copy will be
141 output elsewhere when corresponding vtable is output.
142 This is however not possible - ABI specify that COMDATs are output in
143 units where they are used and when the other unit was compiled with LTO
144 it is possible that vtable was kept public while the function itself
145 was privatized. */
146 if (!symtab->function_flags_ready)
147 return true;
149 snode = symtab_node::get (decl);
150 if (!snode
151 || ((!snode->definition || DECL_EXTERNAL (decl))
152 && (!snode->in_other_partition
153 || (!snode->forced_by_abi && !snode->force_output))))
154 return false;
155 node = dyn_cast <cgraph_node *> (snode);
156 return !node || !node->global.inlined_to;
159 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
160 acceptable form for is_gimple_min_invariant.
161 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
163 tree
164 canonicalize_constructor_val (tree cval, tree from_decl)
166 tree orig_cval = cval;
167 STRIP_NOPS (cval);
168 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
169 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
171 tree ptr = TREE_OPERAND (cval, 0);
172 if (is_gimple_min_invariant (ptr))
173 cval = build1_loc (EXPR_LOCATION (cval),
174 ADDR_EXPR, TREE_TYPE (ptr),
175 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
176 ptr,
177 fold_convert (ptr_type_node,
178 TREE_OPERAND (cval, 1))));
180 if (TREE_CODE (cval) == ADDR_EXPR)
182 tree base = NULL_TREE;
183 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
185 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
186 if (base)
187 TREE_OPERAND (cval, 0) = base;
189 else
190 base = get_base_address (TREE_OPERAND (cval, 0));
191 if (!base)
192 return NULL_TREE;
194 if ((TREE_CODE (base) == VAR_DECL
195 || TREE_CODE (base) == FUNCTION_DECL)
196 && !can_refer_decl_in_current_unit_p (base, from_decl))
197 return NULL_TREE;
198 if (TREE_CODE (base) == VAR_DECL)
199 TREE_ADDRESSABLE (base) = 1;
200 else if (TREE_CODE (base) == FUNCTION_DECL)
202 /* Make sure we create a cgraph node for functions we'll reference.
203 They can be non-existent if the reference comes from an entry
204 of an external vtable for example. */
205 cgraph_node::get_create (base);
207 /* Fixup types in global initializers. */
208 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
209 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
211 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
212 cval = fold_convert (TREE_TYPE (orig_cval), cval);
213 return cval;
215 if (TREE_OVERFLOW_P (cval))
216 return drop_tree_overflow (cval);
217 return orig_cval;
220 /* If SYM is a constant variable with known value, return the value.
221 NULL_TREE is returned otherwise. */
223 tree
224 get_symbol_constant_value (tree sym)
226 tree val = ctor_for_folding (sym);
227 if (val != error_mark_node)
229 if (val)
231 val = canonicalize_constructor_val (unshare_expr (val), sym);
232 if (val && is_gimple_min_invariant (val))
233 return val;
234 else
235 return NULL_TREE;
237 /* Variables declared 'const' without an initializer
238 have zero as the initializer if they may not be
239 overridden at link or run time. */
240 if (!val
241 && is_gimple_reg_type (TREE_TYPE (sym)))
242 return build_zero_cst (TREE_TYPE (sym));
245 return NULL_TREE;
250 /* Subroutine of fold_stmt. We perform several simplifications of the
251 memory reference tree EXPR and make sure to re-gimplify them properly
252 after propagation of constant addresses. IS_LHS is true if the
253 reference is supposed to be an lvalue. */
255 static tree
256 maybe_fold_reference (tree expr, bool is_lhs)
258 tree result;
260 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
261 || TREE_CODE (expr) == REALPART_EXPR
262 || TREE_CODE (expr) == IMAGPART_EXPR)
263 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
264 return fold_unary_loc (EXPR_LOCATION (expr),
265 TREE_CODE (expr),
266 TREE_TYPE (expr),
267 TREE_OPERAND (expr, 0));
268 else if (TREE_CODE (expr) == BIT_FIELD_REF
269 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
270 return fold_ternary_loc (EXPR_LOCATION (expr),
271 TREE_CODE (expr),
272 TREE_TYPE (expr),
273 TREE_OPERAND (expr, 0),
274 TREE_OPERAND (expr, 1),
275 TREE_OPERAND (expr, 2));
277 if (!is_lhs
278 && (result = fold_const_aggregate_ref (expr))
279 && is_gimple_min_invariant (result))
280 return result;
282 return NULL_TREE;
286 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
287 replacement rhs for the statement or NULL_TREE if no simplification
288 could be made. It is assumed that the operands have been previously
289 folded. */
291 static tree
292 fold_gimple_assign (gimple_stmt_iterator *si)
294 gimple *stmt = gsi_stmt (*si);
295 enum tree_code subcode = gimple_assign_rhs_code (stmt);
296 location_t loc = gimple_location (stmt);
298 tree result = NULL_TREE;
300 switch (get_gimple_rhs_class (subcode))
302 case GIMPLE_SINGLE_RHS:
304 tree rhs = gimple_assign_rhs1 (stmt);
306 if (TREE_CLOBBER_P (rhs))
307 return NULL_TREE;
309 if (REFERENCE_CLASS_P (rhs))
310 return maybe_fold_reference (rhs, false);
312 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
314 tree val = OBJ_TYPE_REF_EXPR (rhs);
315 if (is_gimple_min_invariant (val))
316 return val;
317 else if (flag_devirtualize && virtual_method_call_p (rhs))
319 bool final;
320 vec <cgraph_node *>targets
321 = possible_polymorphic_call_targets (rhs, stmt, &final);
322 if (final && targets.length () <= 1 && dbg_cnt (devirt))
324 if (dump_enabled_p ())
326 location_t loc = gimple_location_safe (stmt);
327 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
328 "resolving virtual function address "
329 "reference to function %s\n",
330 targets.length () == 1
331 ? targets[0]->name ()
332 : "NULL");
334 if (targets.length () == 1)
336 val = fold_convert (TREE_TYPE (val),
337 build_fold_addr_expr_loc
338 (loc, targets[0]->decl));
339 STRIP_USELESS_TYPE_CONVERSION (val);
341 else
342 /* We can not use __builtin_unreachable here because it
343 can not have address taken. */
344 val = build_int_cst (TREE_TYPE (val), 0);
345 return val;
350 else if (TREE_CODE (rhs) == ADDR_EXPR)
352 tree ref = TREE_OPERAND (rhs, 0);
353 tree tem = maybe_fold_reference (ref, true);
354 if (tem
355 && TREE_CODE (tem) == MEM_REF
356 && integer_zerop (TREE_OPERAND (tem, 1)))
357 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
358 else if (tem)
359 result = fold_convert (TREE_TYPE (rhs),
360 build_fold_addr_expr_loc (loc, tem));
361 else if (TREE_CODE (ref) == MEM_REF
362 && integer_zerop (TREE_OPERAND (ref, 1)))
363 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
365 if (result)
367 /* Strip away useless type conversions. Both the
368 NON_LVALUE_EXPR that may have been added by fold, and
369 "useless" type conversions that might now be apparent
370 due to propagation. */
371 STRIP_USELESS_TYPE_CONVERSION (result);
373 if (result != rhs && valid_gimple_rhs_p (result))
374 return result;
378 else if (TREE_CODE (rhs) == CONSTRUCTOR
379 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
381 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
382 unsigned i;
383 tree val;
385 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
386 if (! CONSTANT_CLASS_P (val))
387 return NULL_TREE;
389 return build_vector_from_ctor (TREE_TYPE (rhs),
390 CONSTRUCTOR_ELTS (rhs));
393 else if (DECL_P (rhs))
394 return get_symbol_constant_value (rhs);
396 break;
398 case GIMPLE_UNARY_RHS:
399 break;
401 case GIMPLE_BINARY_RHS:
402 break;
404 case GIMPLE_TERNARY_RHS:
405 result = fold_ternary_loc (loc, subcode,
406 TREE_TYPE (gimple_assign_lhs (stmt)),
407 gimple_assign_rhs1 (stmt),
408 gimple_assign_rhs2 (stmt),
409 gimple_assign_rhs3 (stmt));
411 if (result)
413 STRIP_USELESS_TYPE_CONVERSION (result);
414 if (valid_gimple_rhs_p (result))
415 return result;
417 break;
419 case GIMPLE_INVALID_RHS:
420 gcc_unreachable ();
423 return NULL_TREE;
427 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
428 adjusting the replacement stmts location and virtual operands.
429 If the statement has a lhs the last stmt in the sequence is expected
430 to assign to that lhs. */
432 static void
433 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
435 gimple *stmt = gsi_stmt (*si_p);
437 if (gimple_has_location (stmt))
438 annotate_all_with_location (stmts, gimple_location (stmt));
440 /* First iterate over the replacement statements backward, assigning
441 virtual operands to their defining statements. */
442 gimple *laststore = NULL;
443 for (gimple_stmt_iterator i = gsi_last (stmts);
444 !gsi_end_p (i); gsi_prev (&i))
446 gimple *new_stmt = gsi_stmt (i);
447 if ((gimple_assign_single_p (new_stmt)
448 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
449 || (is_gimple_call (new_stmt)
450 && (gimple_call_flags (new_stmt)
451 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
453 tree vdef;
454 if (!laststore)
455 vdef = gimple_vdef (stmt);
456 else
457 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
458 gimple_set_vdef (new_stmt, vdef);
459 if (vdef && TREE_CODE (vdef) == SSA_NAME)
460 SSA_NAME_DEF_STMT (vdef) = new_stmt;
461 laststore = new_stmt;
465 /* Second iterate over the statements forward, assigning virtual
466 operands to their uses. */
467 tree reaching_vuse = gimple_vuse (stmt);
468 for (gimple_stmt_iterator i = gsi_start (stmts);
469 !gsi_end_p (i); gsi_next (&i))
471 gimple *new_stmt = gsi_stmt (i);
472 /* If the new statement possibly has a VUSE, update it with exact SSA
473 name we know will reach this one. */
474 if (gimple_has_mem_ops (new_stmt))
475 gimple_set_vuse (new_stmt, reaching_vuse);
476 gimple_set_modified (new_stmt, true);
477 if (gimple_vdef (new_stmt))
478 reaching_vuse = gimple_vdef (new_stmt);
481 /* If the new sequence does not do a store release the virtual
482 definition of the original statement. */
483 if (reaching_vuse
484 && reaching_vuse == gimple_vuse (stmt))
486 tree vdef = gimple_vdef (stmt);
487 if (vdef
488 && TREE_CODE (vdef) == SSA_NAME)
490 unlink_stmt_vdef (stmt);
491 release_ssa_name (vdef);
495 /* Finally replace the original statement with the sequence. */
496 gsi_replace_with_seq (si_p, stmts, false);
499 /* Convert EXPR into a GIMPLE value suitable for substitution on the
500 RHS of an assignment. Insert the necessary statements before
501 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
502 is replaced. If the call is expected to produces a result, then it
503 is replaced by an assignment of the new RHS to the result variable.
504 If the result is to be ignored, then the call is replaced by a
505 GIMPLE_NOP. A proper VDEF chain is retained by making the first
506 VUSE and the last VDEF of the whole sequence be the same as the replaced
507 statement and using new SSA names for stores in between. */
509 void
510 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
512 tree lhs;
513 gimple *stmt, *new_stmt;
514 gimple_stmt_iterator i;
515 gimple_seq stmts = NULL;
517 stmt = gsi_stmt (*si_p);
519 gcc_assert (is_gimple_call (stmt));
521 push_gimplify_context (gimple_in_ssa_p (cfun));
523 lhs = gimple_call_lhs (stmt);
524 if (lhs == NULL_TREE)
526 gimplify_and_add (expr, &stmts);
527 /* We can end up with folding a memcpy of an empty class assignment
528 which gets optimized away by C++ gimplification. */
529 if (gimple_seq_empty_p (stmts))
531 pop_gimplify_context (NULL);
532 if (gimple_in_ssa_p (cfun))
534 unlink_stmt_vdef (stmt);
535 release_defs (stmt);
537 gsi_replace (si_p, gimple_build_nop (), false);
538 return;
541 else
543 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
544 new_stmt = gimple_build_assign (lhs, tmp);
545 i = gsi_last (stmts);
546 gsi_insert_after_without_update (&i, new_stmt,
547 GSI_CONTINUE_LINKING);
550 pop_gimplify_context (NULL);
552 gsi_replace_with_seq_vops (si_p, stmts);
556 /* Replace the call at *GSI with the gimple value VAL. */
558 static void
559 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
561 gimple *stmt = gsi_stmt (*gsi);
562 tree lhs = gimple_call_lhs (stmt);
563 gimple *repl;
564 if (lhs)
566 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
567 val = fold_convert (TREE_TYPE (lhs), val);
568 repl = gimple_build_assign (lhs, val);
570 else
571 repl = gimple_build_nop ();
572 tree vdef = gimple_vdef (stmt);
573 if (vdef && TREE_CODE (vdef) == SSA_NAME)
575 unlink_stmt_vdef (stmt);
576 release_ssa_name (vdef);
578 gsi_replace (gsi, repl, false);
581 /* Replace the call at *GSI with the new call REPL and fold that
582 again. */
584 static void
585 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
587 gimple *stmt = gsi_stmt (*gsi);
588 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
589 gimple_set_location (repl, gimple_location (stmt));
590 if (gimple_vdef (stmt)
591 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
593 gimple_set_vdef (repl, gimple_vdef (stmt));
594 gimple_set_vuse (repl, gimple_vuse (stmt));
595 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
597 gsi_replace (gsi, repl, false);
598 fold_stmt (gsi);
601 /* Return true if VAR is a VAR_DECL or a component thereof. */
603 static bool
604 var_decl_component_p (tree var)
606 tree inner = var;
607 while (handled_component_p (inner))
608 inner = TREE_OPERAND (inner, 0);
609 return SSA_VAR_P (inner);
612 /* Fold function call to builtin mem{{,p}cpy,move}. Return
613 false if no simplification can be made.
614 If ENDP is 0, return DEST (like memcpy).
615 If ENDP is 1, return DEST+LEN (like mempcpy).
616 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
617 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
618 (memmove). */
620 static bool
621 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
622 tree dest, tree src, int endp)
624 gimple *stmt = gsi_stmt (*gsi);
625 tree lhs = gimple_call_lhs (stmt);
626 tree len = gimple_call_arg (stmt, 2);
627 tree destvar, srcvar;
628 location_t loc = gimple_location (stmt);
630 /* If the LEN parameter is zero, return DEST. */
631 if (integer_zerop (len))
633 gimple *repl;
634 if (gimple_call_lhs (stmt))
635 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
636 else
637 repl = gimple_build_nop ();
638 tree vdef = gimple_vdef (stmt);
639 if (vdef && TREE_CODE (vdef) == SSA_NAME)
641 unlink_stmt_vdef (stmt);
642 release_ssa_name (vdef);
644 gsi_replace (gsi, repl, false);
645 return true;
648 /* If SRC and DEST are the same (and not volatile), return
649 DEST{,+LEN,+LEN-1}. */
650 if (operand_equal_p (src, dest, 0))
652 unlink_stmt_vdef (stmt);
653 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
654 release_ssa_name (gimple_vdef (stmt));
655 if (!lhs)
657 gsi_replace (gsi, gimple_build_nop (), false);
658 return true;
660 goto done;
662 else
664 tree srctype, desttype;
665 unsigned int src_align, dest_align;
666 tree off0;
668 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
669 pointers as wide integer) and also may result in huge function
670 size because of inlined bounds copy. Thus don't inline for
671 functions we want to instrument. */
672 if (flag_check_pointer_bounds
673 && chkp_instrumentable_p (cfun->decl)
674 /* Even if data may contain pointers we can inline if copy
675 less than a pointer size. */
676 && (!tree_fits_uhwi_p (len)
677 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
678 return false;
680 /* Build accesses at offset zero with a ref-all character type. */
681 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
682 ptr_mode, true), 0);
684 /* If we can perform the copy efficiently with first doing all loads
685 and then all stores inline it that way. Currently efficiently
686 means that we can load all the memory into a single integer
687 register which is what MOVE_MAX gives us. */
688 src_align = get_pointer_alignment (src);
689 dest_align = get_pointer_alignment (dest);
690 if (tree_fits_uhwi_p (len)
691 && compare_tree_int (len, MOVE_MAX) <= 0
692 /* ??? Don't transform copies from strings with known length this
693 confuses the tree-ssa-strlen.c. This doesn't handle
694 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
695 reason. */
696 && !c_strlen (src, 2))
698 unsigned ilen = tree_to_uhwi (len);
699 if (exact_log2 (ilen) != -1)
701 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
702 if (type
703 && TYPE_MODE (type) != BLKmode
704 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
705 == ilen * 8)
706 /* If the destination pointer is not aligned we must be able
707 to emit an unaligned store. */
708 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
709 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
710 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
711 != CODE_FOR_nothing)))
713 tree srctype = type;
714 tree desttype = type;
715 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
716 srctype = build_aligned_type (type, src_align);
717 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
718 tree tem = fold_const_aggregate_ref (srcmem);
719 if (tem)
720 srcmem = tem;
721 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
722 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
723 src_align)
724 && (optab_handler (movmisalign_optab,
725 TYPE_MODE (type))
726 == CODE_FOR_nothing))
727 srcmem = NULL_TREE;
728 if (srcmem)
730 gimple *new_stmt;
731 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
733 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
734 if (gimple_in_ssa_p (cfun))
735 srcmem = make_ssa_name (TREE_TYPE (srcmem),
736 new_stmt);
737 else
738 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
739 gimple_assign_set_lhs (new_stmt, srcmem);
740 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
741 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
743 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
744 desttype = build_aligned_type (type, dest_align);
745 new_stmt
746 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
747 dest, off0),
748 srcmem);
749 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
750 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
751 if (gimple_vdef (new_stmt)
752 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
753 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
754 if (!lhs)
756 gsi_replace (gsi, new_stmt, false);
757 return true;
759 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
760 goto done;
766 if (endp == 3)
768 /* Both DEST and SRC must be pointer types.
769 ??? This is what old code did. Is the testing for pointer types
770 really mandatory?
772 If either SRC is readonly or length is 1, we can use memcpy. */
773 if (!dest_align || !src_align)
774 return false;
775 if (readonly_data_expr (src)
776 || (tree_fits_uhwi_p (len)
777 && (MIN (src_align, dest_align) / BITS_PER_UNIT
778 >= tree_to_uhwi (len))))
780 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
781 if (!fn)
782 return false;
783 gimple_call_set_fndecl (stmt, fn);
784 gimple_call_set_arg (stmt, 0, dest);
785 gimple_call_set_arg (stmt, 1, src);
786 fold_stmt (gsi);
787 return true;
790 /* If *src and *dest can't overlap, optimize into memcpy as well. */
791 if (TREE_CODE (src) == ADDR_EXPR
792 && TREE_CODE (dest) == ADDR_EXPR)
794 tree src_base, dest_base, fn;
795 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
796 HOST_WIDE_INT size = -1;
797 HOST_WIDE_INT maxsize = -1;
798 bool reverse;
800 srcvar = TREE_OPERAND (src, 0);
801 src_base = get_ref_base_and_extent (srcvar, &src_offset,
802 &size, &maxsize, &reverse);
803 destvar = TREE_OPERAND (dest, 0);
804 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
805 &size, &maxsize, &reverse);
806 if (tree_fits_uhwi_p (len))
807 maxsize = tree_to_uhwi (len);
808 else
809 maxsize = -1;
810 src_offset /= BITS_PER_UNIT;
811 dest_offset /= BITS_PER_UNIT;
812 if (SSA_VAR_P (src_base)
813 && SSA_VAR_P (dest_base))
815 if (operand_equal_p (src_base, dest_base, 0)
816 && ranges_overlap_p (src_offset, maxsize,
817 dest_offset, maxsize))
818 return false;
820 else if (TREE_CODE (src_base) == MEM_REF
821 && TREE_CODE (dest_base) == MEM_REF)
823 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
824 TREE_OPERAND (dest_base, 0), 0))
825 return false;
826 offset_int off = mem_ref_offset (src_base) + src_offset;
827 if (!wi::fits_shwi_p (off))
828 return false;
829 src_offset = off.to_shwi ();
831 off = mem_ref_offset (dest_base) + dest_offset;
832 if (!wi::fits_shwi_p (off))
833 return false;
834 dest_offset = off.to_shwi ();
835 if (ranges_overlap_p (src_offset, maxsize,
836 dest_offset, maxsize))
837 return false;
839 else
840 return false;
842 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
843 if (!fn)
844 return false;
845 gimple_call_set_fndecl (stmt, fn);
846 gimple_call_set_arg (stmt, 0, dest);
847 gimple_call_set_arg (stmt, 1, src);
848 fold_stmt (gsi);
849 return true;
852 /* If the destination and source do not alias optimize into
853 memcpy as well. */
854 if ((is_gimple_min_invariant (dest)
855 || TREE_CODE (dest) == SSA_NAME)
856 && (is_gimple_min_invariant (src)
857 || TREE_CODE (src) == SSA_NAME))
859 ao_ref destr, srcr;
860 ao_ref_init_from_ptr_and_size (&destr, dest, len);
861 ao_ref_init_from_ptr_and_size (&srcr, src, len);
862 if (!refs_may_alias_p_1 (&destr, &srcr, false))
864 tree fn;
865 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
866 if (!fn)
867 return false;
868 gimple_call_set_fndecl (stmt, fn);
869 gimple_call_set_arg (stmt, 0, dest);
870 gimple_call_set_arg (stmt, 1, src);
871 fold_stmt (gsi);
872 return true;
876 return false;
879 if (!tree_fits_shwi_p (len))
880 return false;
881 /* FIXME:
882 This logic lose for arguments like (type *)malloc (sizeof (type)),
883 since we strip the casts of up to VOID return value from malloc.
884 Perhaps we ought to inherit type from non-VOID argument here? */
885 STRIP_NOPS (src);
886 STRIP_NOPS (dest);
887 if (!POINTER_TYPE_P (TREE_TYPE (src))
888 || !POINTER_TYPE_P (TREE_TYPE (dest)))
889 return false;
890 /* In the following try to find a type that is most natural to be
891 used for the memcpy source and destination and that allows
892 the most optimization when memcpy is turned into a plain assignment
893 using that type. In theory we could always use a char[len] type
894 but that only gains us that the destination and source possibly
895 no longer will have their address taken. */
896 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
897 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
899 tree tem = TREE_OPERAND (src, 0);
900 STRIP_NOPS (tem);
901 if (tem != TREE_OPERAND (src, 0))
902 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
904 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
906 tree tem = TREE_OPERAND (dest, 0);
907 STRIP_NOPS (tem);
908 if (tem != TREE_OPERAND (dest, 0))
909 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
911 srctype = TREE_TYPE (TREE_TYPE (src));
912 if (TREE_CODE (srctype) == ARRAY_TYPE
913 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
915 srctype = TREE_TYPE (srctype);
916 STRIP_NOPS (src);
917 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
919 desttype = TREE_TYPE (TREE_TYPE (dest));
920 if (TREE_CODE (desttype) == ARRAY_TYPE
921 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
923 desttype = TREE_TYPE (desttype);
924 STRIP_NOPS (dest);
925 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
927 if (TREE_ADDRESSABLE (srctype)
928 || TREE_ADDRESSABLE (desttype))
929 return false;
931 /* Make sure we are not copying using a floating-point mode or
932 a type whose size possibly does not match its precision. */
933 if (FLOAT_MODE_P (TYPE_MODE (desttype))
934 || TREE_CODE (desttype) == BOOLEAN_TYPE
935 || TREE_CODE (desttype) == ENUMERAL_TYPE)
936 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
937 if (FLOAT_MODE_P (TYPE_MODE (srctype))
938 || TREE_CODE (srctype) == BOOLEAN_TYPE
939 || TREE_CODE (srctype) == ENUMERAL_TYPE)
940 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
941 if (!srctype)
942 srctype = desttype;
943 if (!desttype)
944 desttype = srctype;
945 if (!srctype)
946 return false;
948 src_align = get_pointer_alignment (src);
949 dest_align = get_pointer_alignment (dest);
950 if (dest_align < TYPE_ALIGN (desttype)
951 || src_align < TYPE_ALIGN (srctype))
952 return false;
954 destvar = dest;
955 STRIP_NOPS (destvar);
956 if (TREE_CODE (destvar) == ADDR_EXPR
957 && var_decl_component_p (TREE_OPERAND (destvar, 0))
958 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
959 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
960 else
961 destvar = NULL_TREE;
963 srcvar = src;
964 STRIP_NOPS (srcvar);
965 if (TREE_CODE (srcvar) == ADDR_EXPR
966 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
967 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
969 if (!destvar
970 || src_align >= TYPE_ALIGN (desttype))
971 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
972 srcvar, off0);
973 else if (!STRICT_ALIGNMENT)
975 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
976 src_align);
977 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
979 else
980 srcvar = NULL_TREE;
982 else
983 srcvar = NULL_TREE;
985 if (srcvar == NULL_TREE && destvar == NULL_TREE)
986 return false;
988 if (srcvar == NULL_TREE)
990 STRIP_NOPS (src);
991 if (src_align >= TYPE_ALIGN (desttype))
992 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
993 else
995 if (STRICT_ALIGNMENT)
996 return false;
997 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
998 src_align);
999 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1002 else if (destvar == NULL_TREE)
1004 STRIP_NOPS (dest);
1005 if (dest_align >= TYPE_ALIGN (srctype))
1006 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1007 else
1009 if (STRICT_ALIGNMENT)
1010 return false;
1011 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1012 dest_align);
1013 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1017 gimple *new_stmt;
1018 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1020 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1021 if (gimple_in_ssa_p (cfun))
1022 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1023 else
1024 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1025 gimple_assign_set_lhs (new_stmt, srcvar);
1026 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1027 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1029 new_stmt = gimple_build_assign (destvar, srcvar);
1030 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1031 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1032 if (gimple_vdef (new_stmt)
1033 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1034 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1035 if (!lhs)
1037 gsi_replace (gsi, new_stmt, false);
1038 return true;
1040 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1043 done:
1044 gimple_seq stmts = NULL;
1045 if (endp == 0 || endp == 3)
1046 len = NULL_TREE;
1047 else if (endp == 2)
1048 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1049 ssize_int (1));
1050 if (endp == 2 || endp == 1)
1052 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1053 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1054 TREE_TYPE (dest), dest, len);
1057 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1058 gimple *repl = gimple_build_assign (lhs, dest);
1059 gsi_replace (gsi, repl, false);
1060 return true;
1063 /* Fold function call to builtin memset or bzero at *GSI setting the
1064 memory of size LEN to VAL. Return whether a simplification was made. */
1066 static bool
1067 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1069 gimple *stmt = gsi_stmt (*gsi);
1070 tree etype;
1071 unsigned HOST_WIDE_INT length, cval;
1073 /* If the LEN parameter is zero, return DEST. */
1074 if (integer_zerop (len))
1076 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1077 return true;
1080 if (! tree_fits_uhwi_p (len))
1081 return false;
1083 if (TREE_CODE (c) != INTEGER_CST)
1084 return false;
1086 tree dest = gimple_call_arg (stmt, 0);
1087 tree var = dest;
1088 if (TREE_CODE (var) != ADDR_EXPR)
1089 return false;
1091 var = TREE_OPERAND (var, 0);
1092 if (TREE_THIS_VOLATILE (var))
1093 return false;
1095 etype = TREE_TYPE (var);
1096 if (TREE_CODE (etype) == ARRAY_TYPE)
1097 etype = TREE_TYPE (etype);
1099 if (!INTEGRAL_TYPE_P (etype)
1100 && !POINTER_TYPE_P (etype))
1101 return NULL_TREE;
1103 if (! var_decl_component_p (var))
1104 return NULL_TREE;
1106 length = tree_to_uhwi (len);
1107 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1108 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1109 return NULL_TREE;
1111 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1112 return NULL_TREE;
1114 if (integer_zerop (c))
1115 cval = 0;
1116 else
1118 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1119 return NULL_TREE;
1121 cval = TREE_INT_CST_LOW (c);
1122 cval &= 0xff;
1123 cval |= cval << 8;
1124 cval |= cval << 16;
1125 cval |= (cval << 31) << 1;
1128 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1129 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1130 gimple_set_vuse (store, gimple_vuse (stmt));
1131 tree vdef = gimple_vdef (stmt);
1132 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1134 gimple_set_vdef (store, gimple_vdef (stmt));
1135 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1137 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1138 if (gimple_call_lhs (stmt))
1140 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1141 gsi_replace (gsi, asgn, false);
1143 else
1145 gimple_stmt_iterator gsi2 = *gsi;
1146 gsi_prev (gsi);
1147 gsi_remove (&gsi2, true);
1150 return true;
1154 /* Return the string length, maximum string length or maximum value of
1155 ARG in LENGTH.
1156 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1157 is not NULL and, for TYPE == 0, its value is not equal to the length
1158 we determine or if we are unable to determine the length or value,
1159 return false. VISITED is a bitmap of visited variables.
1160 TYPE is 0 if string length should be returned, 1 for maximum string
1161 length and 2 for maximum value ARG can have. */
1163 static bool
1164 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1166 tree var, val;
1167 gimple *def_stmt;
1169 if (TREE_CODE (arg) != SSA_NAME)
1171 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1172 if (TREE_CODE (arg) == ADDR_EXPR
1173 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1174 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1176 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1177 if (TREE_CODE (aop0) == INDIRECT_REF
1178 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1179 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1180 length, visited, type);
1183 if (type == 2)
1185 val = arg;
1186 if (TREE_CODE (val) != INTEGER_CST
1187 || tree_int_cst_sgn (val) < 0)
1188 return false;
1190 else
1191 val = c_strlen (arg, 1);
1192 if (!val)
1193 return false;
1195 if (*length)
1197 if (type > 0)
1199 if (TREE_CODE (*length) != INTEGER_CST
1200 || TREE_CODE (val) != INTEGER_CST)
1201 return false;
1203 if (tree_int_cst_lt (*length, val))
1204 *length = val;
1205 return true;
1207 else if (simple_cst_equal (val, *length) != 1)
1208 return false;
1211 *length = val;
1212 return true;
1215 /* If ARG is registered for SSA update we cannot look at its defining
1216 statement. */
1217 if (name_registered_for_update_p (arg))
1218 return false;
1220 /* If we were already here, break the infinite cycle. */
1221 if (!*visited)
1222 *visited = BITMAP_ALLOC (NULL);
1223 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1224 return true;
1226 var = arg;
1227 def_stmt = SSA_NAME_DEF_STMT (var);
1229 switch (gimple_code (def_stmt))
1231 case GIMPLE_ASSIGN:
1232 /* The RHS of the statement defining VAR must either have a
1233 constant length or come from another SSA_NAME with a constant
1234 length. */
1235 if (gimple_assign_single_p (def_stmt)
1236 || gimple_assign_unary_nop_p (def_stmt))
1238 tree rhs = gimple_assign_rhs1 (def_stmt);
1239 return get_maxval_strlen (rhs, length, visited, type);
1241 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1243 tree op2 = gimple_assign_rhs2 (def_stmt);
1244 tree op3 = gimple_assign_rhs3 (def_stmt);
1245 return get_maxval_strlen (op2, length, visited, type)
1246 && get_maxval_strlen (op3, length, visited, type);
1248 return false;
1250 case GIMPLE_PHI:
1252 /* All the arguments of the PHI node must have the same constant
1253 length. */
1254 unsigned i;
1256 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1258 tree arg = gimple_phi_arg (def_stmt, i)->def;
1260 /* If this PHI has itself as an argument, we cannot
1261 determine the string length of this argument. However,
1262 if we can find a constant string length for the other
1263 PHI args then we can still be sure that this is a
1264 constant string length. So be optimistic and just
1265 continue with the next argument. */
1266 if (arg == gimple_phi_result (def_stmt))
1267 continue;
1269 if (!get_maxval_strlen (arg, length, visited, type))
1270 return false;
1273 return true;
1275 default:
1276 return false;
1280 tree
1281 get_maxval_strlen (tree arg, int type)
1283 bitmap visited = NULL;
1284 tree len = NULL_TREE;
1285 if (!get_maxval_strlen (arg, &len, &visited, type))
1286 len = NULL_TREE;
1287 if (visited)
1288 BITMAP_FREE (visited);
1290 return len;
1294 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1295 If LEN is not NULL, it represents the length of the string to be
1296 copied. Return NULL_TREE if no simplification can be made. */
1298 static bool
1299 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1300 tree dest, tree src)
1302 location_t loc = gimple_location (gsi_stmt (*gsi));
1303 tree fn;
1305 /* If SRC and DEST are the same (and not volatile), return DEST. */
1306 if (operand_equal_p (src, dest, 0))
1308 replace_call_with_value (gsi, dest);
1309 return true;
1312 if (optimize_function_for_size_p (cfun))
1313 return false;
1315 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1316 if (!fn)
1317 return false;
1319 tree len = get_maxval_strlen (src, 0);
1320 if (!len)
1321 return false;
1323 len = fold_convert_loc (loc, size_type_node, len);
1324 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1325 len = force_gimple_operand_gsi (gsi, len, true,
1326 NULL_TREE, true, GSI_SAME_STMT);
1327 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1328 replace_call_with_call_and_fold (gsi, repl);
1329 return true;
1332 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1333 If SLEN is not NULL, it represents the length of the source string.
1334 Return NULL_TREE if no simplification can be made. */
1336 static bool
1337 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1338 tree dest, tree src, tree len)
1340 location_t loc = gimple_location (gsi_stmt (*gsi));
1341 tree fn;
1343 /* If the LEN parameter is zero, return DEST. */
1344 if (integer_zerop (len))
1346 replace_call_with_value (gsi, dest);
1347 return true;
1350 /* We can't compare slen with len as constants below if len is not a
1351 constant. */
1352 if (TREE_CODE (len) != INTEGER_CST)
1353 return false;
1355 /* Now, we must be passed a constant src ptr parameter. */
1356 tree slen = get_maxval_strlen (src, 0);
1357 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1358 return false;
1360 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1362 /* We do not support simplification of this case, though we do
1363 support it when expanding trees into RTL. */
1364 /* FIXME: generate a call to __builtin_memset. */
1365 if (tree_int_cst_lt (slen, len))
1366 return false;
1368 /* OK transform into builtin memcpy. */
1369 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1370 if (!fn)
1371 return false;
1373 len = fold_convert_loc (loc, size_type_node, len);
1374 len = force_gimple_operand_gsi (gsi, len, true,
1375 NULL_TREE, true, GSI_SAME_STMT);
1376 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1377 replace_call_with_call_and_fold (gsi, repl);
1378 return true;
1381 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1382 to the call.
1384 Return NULL_TREE if no simplification was possible, otherwise return the
1385 simplified form of the call as a tree.
1387 The simplified form may be a constant or other expression which
1388 computes the same value, but in a more efficient manner (including
1389 calls to other builtin functions).
1391 The call may contain arguments which need to be evaluated, but
1392 which are not useful to determine the result of the call. In
1393 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1394 COMPOUND_EXPR will be an argument which must be evaluated.
1395 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1396 COMPOUND_EXPR in the chain will contain the tree for the simplified
1397 form of the builtin function call. */
1399 static bool
1400 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1402 gimple *stmt = gsi_stmt (*gsi);
1403 location_t loc = gimple_location (stmt);
1405 const char *p = c_getstr (src);
1407 /* If the string length is zero, return the dst parameter. */
1408 if (p && *p == '\0')
1410 replace_call_with_value (gsi, dst);
1411 return true;
1414 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1415 return false;
1417 /* See if we can store by pieces into (dst + strlen(dst)). */
1418 tree newdst;
1419 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1420 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1422 if (!strlen_fn || !memcpy_fn)
1423 return false;
1425 /* If the length of the source string isn't computable don't
1426 split strcat into strlen and memcpy. */
1427 tree len = get_maxval_strlen (src, 0);
1428 if (! len)
1429 return false;
1431 /* Create strlen (dst). */
1432 gimple_seq stmts = NULL, stmts2;
1433 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1434 gimple_set_location (repl, loc);
1435 if (gimple_in_ssa_p (cfun))
1436 newdst = make_ssa_name (size_type_node);
1437 else
1438 newdst = create_tmp_reg (size_type_node);
1439 gimple_call_set_lhs (repl, newdst);
1440 gimple_seq_add_stmt_without_update (&stmts, repl);
1442 /* Create (dst p+ strlen (dst)). */
1443 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1444 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1445 gimple_seq_add_seq_without_update (&stmts, stmts2);
1447 len = fold_convert_loc (loc, size_type_node, len);
1448 len = size_binop_loc (loc, PLUS_EXPR, len,
1449 build_int_cst (size_type_node, 1));
1450 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1451 gimple_seq_add_seq_without_update (&stmts, stmts2);
1453 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1454 gimple_seq_add_stmt_without_update (&stmts, repl);
1455 if (gimple_call_lhs (stmt))
1457 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1458 gimple_seq_add_stmt_without_update (&stmts, repl);
1459 gsi_replace_with_seq_vops (gsi, stmts);
1460 /* gsi now points at the assignment to the lhs, get a
1461 stmt iterator to the memcpy call.
1462 ??? We can't use gsi_for_stmt as that doesn't work when the
1463 CFG isn't built yet. */
1464 gimple_stmt_iterator gsi2 = *gsi;
1465 gsi_prev (&gsi2);
1466 fold_stmt (&gsi2);
1468 else
1470 gsi_replace_with_seq_vops (gsi, stmts);
1471 fold_stmt (gsi);
1473 return true;
1476 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1477 are the arguments to the call. */
1479 static bool
1480 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1482 gimple *stmt = gsi_stmt (*gsi);
1483 tree dest = gimple_call_arg (stmt, 0);
1484 tree src = gimple_call_arg (stmt, 1);
1485 tree size = gimple_call_arg (stmt, 2);
1486 tree fn;
1487 const char *p;
1490 p = c_getstr (src);
1491 /* If the SRC parameter is "", return DEST. */
1492 if (p && *p == '\0')
1494 replace_call_with_value (gsi, dest);
1495 return true;
1498 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1499 return false;
1501 /* If __builtin_strcat_chk is used, assume strcat is available. */
1502 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1503 if (!fn)
1504 return false;
1506 gimple *repl = gimple_build_call (fn, 2, dest, src);
1507 replace_call_with_call_and_fold (gsi, repl);
1508 return true;
1511 /* Simplify a call to the strncat builtin. */
1513 static bool
1514 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1516 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1517 tree dst = gimple_call_arg (stmt, 0);
1518 tree src = gimple_call_arg (stmt, 1);
1519 tree len = gimple_call_arg (stmt, 2);
1521 const char *p = c_getstr (src);
1523 /* If the requested length is zero, or the src parameter string
1524 length is zero, return the dst parameter. */
1525 if (integer_zerop (len) || (p && *p == '\0'))
1527 replace_call_with_value (gsi, dst);
1528 return true;
1531 /* If the requested len is greater than or equal to the string
1532 length, call strcat. */
1533 if (TREE_CODE (len) == INTEGER_CST && p
1534 && compare_tree_int (len, strlen (p)) >= 0)
1536 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1538 /* If the replacement _DECL isn't initialized, don't do the
1539 transformation. */
1540 if (!fn)
1541 return false;
1543 gcall *repl = gimple_build_call (fn, 2, dst, src);
1544 replace_call_with_call_and_fold (gsi, repl);
1545 return true;
1548 return false;
1551 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1552 LEN, and SIZE. */
1554 static bool
1555 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1557 gimple *stmt = gsi_stmt (*gsi);
1558 tree dest = gimple_call_arg (stmt, 0);
1559 tree src = gimple_call_arg (stmt, 1);
1560 tree len = gimple_call_arg (stmt, 2);
1561 tree size = gimple_call_arg (stmt, 3);
1562 tree fn;
1563 const char *p;
1565 p = c_getstr (src);
1566 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1567 if ((p && *p == '\0')
1568 || integer_zerop (len))
1570 replace_call_with_value (gsi, dest);
1571 return true;
1574 if (! tree_fits_uhwi_p (size))
1575 return false;
1577 if (! integer_all_onesp (size))
1579 tree src_len = c_strlen (src, 1);
1580 if (src_len
1581 && tree_fits_uhwi_p (src_len)
1582 && tree_fits_uhwi_p (len)
1583 && ! tree_int_cst_lt (len, src_len))
1585 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1586 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1587 if (!fn)
1588 return false;
1590 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1591 replace_call_with_call_and_fold (gsi, repl);
1592 return true;
1594 return false;
1597 /* If __builtin_strncat_chk is used, assume strncat is available. */
1598 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1599 if (!fn)
1600 return false;
1602 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1603 replace_call_with_call_and_fold (gsi, repl);
1604 return true;
1607 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1608 to the call. IGNORE is true if the value returned
1609 by the builtin will be ignored. UNLOCKED is true is true if this
1610 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1611 the known length of the string. Return NULL_TREE if no simplification
1612 was possible. */
1614 static bool
1615 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1616 tree arg0, tree arg1,
1617 bool unlocked)
1619 gimple *stmt = gsi_stmt (*gsi);
1621 /* If we're using an unlocked function, assume the other unlocked
1622 functions exist explicitly. */
1623 tree const fn_fputc = (unlocked
1624 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1625 : builtin_decl_implicit (BUILT_IN_FPUTC));
1626 tree const fn_fwrite = (unlocked
1627 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1628 : builtin_decl_implicit (BUILT_IN_FWRITE));
1630 /* If the return value is used, don't do the transformation. */
1631 if (gimple_call_lhs (stmt))
1632 return false;
1634 /* Get the length of the string passed to fputs. If the length
1635 can't be determined, punt. */
1636 tree len = get_maxval_strlen (arg0, 0);
1637 if (!len
1638 || TREE_CODE (len) != INTEGER_CST)
1639 return false;
1641 switch (compare_tree_int (len, 1))
1643 case -1: /* length is 0, delete the call entirely . */
1644 replace_call_with_value (gsi, integer_zero_node);
1645 return true;
1647 case 0: /* length is 1, call fputc. */
1649 const char *p = c_getstr (arg0);
1650 if (p != NULL)
1652 if (!fn_fputc)
1653 return false;
1655 gimple *repl = gimple_build_call (fn_fputc, 2,
1656 build_int_cst
1657 (integer_type_node, p[0]), arg1);
1658 replace_call_with_call_and_fold (gsi, repl);
1659 return true;
1662 /* FALLTHROUGH */
1663 case 1: /* length is greater than 1, call fwrite. */
1665 /* If optimizing for size keep fputs. */
1666 if (optimize_function_for_size_p (cfun))
1667 return false;
1668 /* New argument list transforming fputs(string, stream) to
1669 fwrite(string, 1, len, stream). */
1670 if (!fn_fwrite)
1671 return false;
1673 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
1674 size_one_node, len, arg1);
1675 replace_call_with_call_and_fold (gsi, repl);
1676 return true;
1678 default:
1679 gcc_unreachable ();
1681 return false;
1684 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1685 DEST, SRC, LEN, and SIZE are the arguments to the call.
1686 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1687 code of the builtin. If MAXLEN is not NULL, it is maximum length
1688 passed as third argument. */
1690 static bool
1691 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1692 tree dest, tree src, tree len, tree size,
1693 enum built_in_function fcode)
1695 gimple *stmt = gsi_stmt (*gsi);
1696 location_t loc = gimple_location (stmt);
1697 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1698 tree fn;
1700 /* If SRC and DEST are the same (and not volatile), return DEST
1701 (resp. DEST+LEN for __mempcpy_chk). */
1702 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1704 if (fcode != BUILT_IN_MEMPCPY_CHK)
1706 replace_call_with_value (gsi, dest);
1707 return true;
1709 else
1711 gimple_seq stmts = NULL;
1712 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1713 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR, dest, len);
1714 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1715 replace_call_with_value (gsi, temp);
1716 return true;
1720 if (! tree_fits_uhwi_p (size))
1721 return false;
1723 tree maxlen = get_maxval_strlen (len, 2);
1724 if (! integer_all_onesp (size))
1726 if (! tree_fits_uhwi_p (len))
1728 /* If LEN is not constant, try MAXLEN too.
1729 For MAXLEN only allow optimizing into non-_ocs function
1730 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1731 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1733 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1735 /* (void) __mempcpy_chk () can be optimized into
1736 (void) __memcpy_chk (). */
1737 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1738 if (!fn)
1739 return false;
1741 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1742 replace_call_with_call_and_fold (gsi, repl);
1743 return true;
1745 return false;
1748 else
1749 maxlen = len;
1751 if (tree_int_cst_lt (size, maxlen))
1752 return false;
1755 fn = NULL_TREE;
1756 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1757 mem{cpy,pcpy,move,set} is available. */
1758 switch (fcode)
1760 case BUILT_IN_MEMCPY_CHK:
1761 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1762 break;
1763 case BUILT_IN_MEMPCPY_CHK:
1764 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1765 break;
1766 case BUILT_IN_MEMMOVE_CHK:
1767 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1768 break;
1769 case BUILT_IN_MEMSET_CHK:
1770 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1771 break;
1772 default:
1773 break;
1776 if (!fn)
1777 return false;
1779 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1780 replace_call_with_call_and_fold (gsi, repl);
1781 return true;
1784 /* Fold a call to the __st[rp]cpy_chk builtin.
1785 DEST, SRC, and SIZE are the arguments to the call.
1786 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1787 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1788 strings passed as second argument. */
1790 static bool
1791 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1792 tree dest,
1793 tree src, tree size,
1794 enum built_in_function fcode)
1796 gimple *stmt = gsi_stmt (*gsi);
1797 location_t loc = gimple_location (stmt);
1798 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1799 tree len, fn;
1801 /* If SRC and DEST are the same (and not volatile), return DEST. */
1802 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1804 replace_call_with_value (gsi, dest);
1805 return true;
1808 if (! tree_fits_uhwi_p (size))
1809 return false;
1811 tree maxlen = get_maxval_strlen (src, 1);
1812 if (! integer_all_onesp (size))
1814 len = c_strlen (src, 1);
1815 if (! len || ! tree_fits_uhwi_p (len))
1817 /* If LEN is not constant, try MAXLEN too.
1818 For MAXLEN only allow optimizing into non-_ocs function
1819 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1820 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1822 if (fcode == BUILT_IN_STPCPY_CHK)
1824 if (! ignore)
1825 return false;
1827 /* If return value of __stpcpy_chk is ignored,
1828 optimize into __strcpy_chk. */
1829 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1830 if (!fn)
1831 return false;
1833 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1834 replace_call_with_call_and_fold (gsi, repl);
1835 return true;
1838 if (! len || TREE_SIDE_EFFECTS (len))
1839 return false;
1841 /* If c_strlen returned something, but not a constant,
1842 transform __strcpy_chk into __memcpy_chk. */
1843 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1844 if (!fn)
1845 return false;
1847 gimple_seq stmts = NULL;
1848 len = gimple_convert (&stmts, loc, size_type_node, len);
1849 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
1850 build_int_cst (size_type_node, 1));
1851 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1852 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1853 replace_call_with_call_and_fold (gsi, repl);
1854 return true;
1857 else
1858 maxlen = len;
1860 if (! tree_int_cst_lt (maxlen, size))
1861 return false;
1864 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1865 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1866 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1867 if (!fn)
1868 return false;
1870 gimple *repl = gimple_build_call (fn, 2, dest, src);
1871 replace_call_with_call_and_fold (gsi, repl);
1872 return true;
1875 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1876 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1877 length passed as third argument. IGNORE is true if return value can be
1878 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1880 static bool
1881 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1882 tree dest, tree src,
1883 tree len, tree size,
1884 enum built_in_function fcode)
1886 gimple *stmt = gsi_stmt (*gsi);
1887 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1888 tree fn;
1890 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1892 /* If return value of __stpncpy_chk is ignored,
1893 optimize into __strncpy_chk. */
1894 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1895 if (fn)
1897 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1898 replace_call_with_call_and_fold (gsi, repl);
1899 return true;
1903 if (! tree_fits_uhwi_p (size))
1904 return false;
1906 tree maxlen = get_maxval_strlen (len, 2);
1907 if (! integer_all_onesp (size))
1909 if (! tree_fits_uhwi_p (len))
1911 /* If LEN is not constant, try MAXLEN too.
1912 For MAXLEN only allow optimizing into non-_ocs function
1913 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1914 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1915 return false;
1917 else
1918 maxlen = len;
1920 if (tree_int_cst_lt (size, maxlen))
1921 return false;
1924 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1925 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
1926 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
1927 if (!fn)
1928 return false;
1930 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1931 replace_call_with_call_and_fold (gsi, repl);
1932 return true;
1935 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
1936 Return NULL_TREE if no simplification can be made. */
1938 static bool
1939 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
1941 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1942 location_t loc = gimple_location (stmt);
1943 tree dest = gimple_call_arg (stmt, 0);
1944 tree src = gimple_call_arg (stmt, 1);
1945 tree fn, len, lenp1;
1947 /* If the result is unused, replace stpcpy with strcpy. */
1948 if (gimple_call_lhs (stmt) == NULL_TREE)
1950 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
1951 if (!fn)
1952 return false;
1953 gimple_call_set_fndecl (stmt, fn);
1954 fold_stmt (gsi);
1955 return true;
1958 len = c_strlen (src, 1);
1959 if (!len
1960 || TREE_CODE (len) != INTEGER_CST)
1961 return false;
1963 if (optimize_function_for_size_p (cfun)
1964 /* If length is zero it's small enough. */
1965 && !integer_zerop (len))
1966 return false;
1968 /* If the source has a known length replace stpcpy with memcpy. */
1969 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1970 if (!fn)
1971 return false;
1973 gimple_seq stmts = NULL;
1974 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
1975 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
1976 tem, build_int_cst (size_type_node, 1));
1977 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1978 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
1979 gimple_set_vuse (repl, gimple_vuse (stmt));
1980 gimple_set_vdef (repl, gimple_vdef (stmt));
1981 if (gimple_vdef (repl)
1982 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
1983 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
1984 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
1985 /* Replace the result with dest + len. */
1986 stmts = NULL;
1987 tem = gimple_convert (&stmts, loc, sizetype, len);
1988 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1989 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
1990 POINTER_PLUS_EXPR, dest, tem);
1991 gsi_replace (gsi, ret, false);
1992 /* Finally fold the memcpy call. */
1993 gimple_stmt_iterator gsi2 = *gsi;
1994 gsi_prev (&gsi2);
1995 fold_stmt (&gsi2);
1996 return true;
1999 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2000 NULL_TREE if a normal call should be emitted rather than expanding
2001 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2002 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2003 passed as second argument. */
2005 static bool
2006 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2007 enum built_in_function fcode)
2009 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2010 tree dest, size, len, fn, fmt, flag;
2011 const char *fmt_str;
2013 /* Verify the required arguments in the original call. */
2014 if (gimple_call_num_args (stmt) < 5)
2015 return false;
2017 dest = gimple_call_arg (stmt, 0);
2018 len = gimple_call_arg (stmt, 1);
2019 flag = gimple_call_arg (stmt, 2);
2020 size = gimple_call_arg (stmt, 3);
2021 fmt = gimple_call_arg (stmt, 4);
2023 if (! tree_fits_uhwi_p (size))
2024 return false;
2026 if (! integer_all_onesp (size))
2028 tree maxlen = get_maxval_strlen (len, 2);
2029 if (! tree_fits_uhwi_p (len))
2031 /* If LEN is not constant, try MAXLEN too.
2032 For MAXLEN only allow optimizing into non-_ocs function
2033 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2034 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2035 return false;
2037 else
2038 maxlen = len;
2040 if (tree_int_cst_lt (size, maxlen))
2041 return false;
2044 if (!init_target_chars ())
2045 return false;
2047 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2048 or if format doesn't contain % chars or is "%s". */
2049 if (! integer_zerop (flag))
2051 fmt_str = c_getstr (fmt);
2052 if (fmt_str == NULL)
2053 return false;
2054 if (strchr (fmt_str, target_percent) != NULL
2055 && strcmp (fmt_str, target_percent_s))
2056 return false;
2059 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2060 available. */
2061 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2062 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2063 if (!fn)
2064 return false;
2066 /* Replace the called function and the first 5 argument by 3 retaining
2067 trailing varargs. */
2068 gimple_call_set_fndecl (stmt, fn);
2069 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2070 gimple_call_set_arg (stmt, 0, dest);
2071 gimple_call_set_arg (stmt, 1, len);
2072 gimple_call_set_arg (stmt, 2, fmt);
2073 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2074 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2075 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2076 fold_stmt (gsi);
2077 return true;
2080 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2081 Return NULL_TREE if a normal call should be emitted rather than
2082 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2083 or BUILT_IN_VSPRINTF_CHK. */
2085 static bool
2086 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2087 enum built_in_function fcode)
2089 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2090 tree dest, size, len, fn, fmt, flag;
2091 const char *fmt_str;
2092 unsigned nargs = gimple_call_num_args (stmt);
2094 /* Verify the required arguments in the original call. */
2095 if (nargs < 4)
2096 return false;
2097 dest = gimple_call_arg (stmt, 0);
2098 flag = gimple_call_arg (stmt, 1);
2099 size = gimple_call_arg (stmt, 2);
2100 fmt = gimple_call_arg (stmt, 3);
2102 if (! tree_fits_uhwi_p (size))
2103 return false;
2105 len = NULL_TREE;
2107 if (!init_target_chars ())
2108 return false;
2110 /* Check whether the format is a literal string constant. */
2111 fmt_str = c_getstr (fmt);
2112 if (fmt_str != NULL)
2114 /* If the format doesn't contain % args or %%, we know the size. */
2115 if (strchr (fmt_str, target_percent) == 0)
2117 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2118 len = build_int_cstu (size_type_node, strlen (fmt_str));
2120 /* If the format is "%s" and first ... argument is a string literal,
2121 we know the size too. */
2122 else if (fcode == BUILT_IN_SPRINTF_CHK
2123 && strcmp (fmt_str, target_percent_s) == 0)
2125 tree arg;
2127 if (nargs == 5)
2129 arg = gimple_call_arg (stmt, 4);
2130 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2132 len = c_strlen (arg, 1);
2133 if (! len || ! tree_fits_uhwi_p (len))
2134 len = NULL_TREE;
2140 if (! integer_all_onesp (size))
2142 if (! len || ! tree_int_cst_lt (len, size))
2143 return false;
2146 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2147 or if format doesn't contain % chars or is "%s". */
2148 if (! integer_zerop (flag))
2150 if (fmt_str == NULL)
2151 return false;
2152 if (strchr (fmt_str, target_percent) != NULL
2153 && strcmp (fmt_str, target_percent_s))
2154 return false;
2157 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2158 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2159 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2160 if (!fn)
2161 return false;
2163 /* Replace the called function and the first 4 argument by 2 retaining
2164 trailing varargs. */
2165 gimple_call_set_fndecl (stmt, fn);
2166 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2167 gimple_call_set_arg (stmt, 0, dest);
2168 gimple_call_set_arg (stmt, 1, fmt);
2169 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2170 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2171 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2172 fold_stmt (gsi);
2173 return true;
2176 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2177 ORIG may be null if this is a 2-argument call. We don't attempt to
2178 simplify calls with more than 3 arguments.
2180 Return NULL_TREE if no simplification was possible, otherwise return the
2181 simplified form of the call as a tree. If IGNORED is true, it means that
2182 the caller does not use the returned value of the function. */
2184 static bool
2185 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2187 gimple *stmt = gsi_stmt (*gsi);
2188 tree dest = gimple_call_arg (stmt, 0);
2189 tree fmt = gimple_call_arg (stmt, 1);
2190 tree orig = NULL_TREE;
2191 const char *fmt_str = NULL;
2193 /* Verify the required arguments in the original call. We deal with two
2194 types of sprintf() calls: 'sprintf (str, fmt)' and
2195 'sprintf (dest, "%s", orig)'. */
2196 if (gimple_call_num_args (stmt) > 3)
2197 return false;
2199 if (gimple_call_num_args (stmt) == 3)
2200 orig = gimple_call_arg (stmt, 2);
2202 /* Check whether the format is a literal string constant. */
2203 fmt_str = c_getstr (fmt);
2204 if (fmt_str == NULL)
2205 return false;
2207 if (!init_target_chars ())
2208 return false;
2210 /* If the format doesn't contain % args or %%, use strcpy. */
2211 if (strchr (fmt_str, target_percent) == NULL)
2213 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2215 if (!fn)
2216 return false;
2218 /* Don't optimize sprintf (buf, "abc", ptr++). */
2219 if (orig)
2220 return false;
2222 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2223 'format' is known to contain no % formats. */
2224 gimple_seq stmts = NULL;
2225 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2226 gimple_seq_add_stmt_without_update (&stmts, repl);
2227 if (gimple_call_lhs (stmt))
2229 repl = gimple_build_assign (gimple_call_lhs (stmt),
2230 build_int_cst (integer_type_node,
2231 strlen (fmt_str)));
2232 gimple_seq_add_stmt_without_update (&stmts, repl);
2233 gsi_replace_with_seq_vops (gsi, stmts);
2234 /* gsi now points at the assignment to the lhs, get a
2235 stmt iterator to the memcpy call.
2236 ??? We can't use gsi_for_stmt as that doesn't work when the
2237 CFG isn't built yet. */
2238 gimple_stmt_iterator gsi2 = *gsi;
2239 gsi_prev (&gsi2);
2240 fold_stmt (&gsi2);
2242 else
2244 gsi_replace_with_seq_vops (gsi, stmts);
2245 fold_stmt (gsi);
2247 return true;
2250 /* If the format is "%s", use strcpy if the result isn't used. */
2251 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2253 tree fn;
2254 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2256 if (!fn)
2257 return false;
2259 /* Don't crash on sprintf (str1, "%s"). */
2260 if (!orig)
2261 return false;
2263 tree orig_len = NULL_TREE;
2264 if (gimple_call_lhs (stmt))
2266 orig_len = get_maxval_strlen (orig, 0);
2267 if (!orig_len)
2268 return false;
2271 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2272 gimple_seq stmts = NULL;
2273 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2274 gimple_seq_add_stmt_without_update (&stmts, repl);
2275 if (gimple_call_lhs (stmt))
2277 if (!useless_type_conversion_p (integer_type_node,
2278 TREE_TYPE (orig_len)))
2279 orig_len = fold_convert (integer_type_node, orig_len);
2280 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2281 gimple_seq_add_stmt_without_update (&stmts, repl);
2282 gsi_replace_with_seq_vops (gsi, stmts);
2283 /* gsi now points at the assignment to the lhs, get a
2284 stmt iterator to the memcpy call.
2285 ??? We can't use gsi_for_stmt as that doesn't work when the
2286 CFG isn't built yet. */
2287 gimple_stmt_iterator gsi2 = *gsi;
2288 gsi_prev (&gsi2);
2289 fold_stmt (&gsi2);
2291 else
2293 gsi_replace_with_seq_vops (gsi, stmts);
2294 fold_stmt (gsi);
2296 return true;
2298 return false;
2301 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2302 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2303 attempt to simplify calls with more than 4 arguments.
2305 Return NULL_TREE if no simplification was possible, otherwise return the
2306 simplified form of the call as a tree. If IGNORED is true, it means that
2307 the caller does not use the returned value of the function. */
2309 static bool
2310 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2312 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2313 tree dest = gimple_call_arg (stmt, 0);
2314 tree destsize = gimple_call_arg (stmt, 1);
2315 tree fmt = gimple_call_arg (stmt, 2);
2316 tree orig = NULL_TREE;
2317 const char *fmt_str = NULL;
2319 if (gimple_call_num_args (stmt) > 4)
2320 return false;
2322 if (gimple_call_num_args (stmt) == 4)
2323 orig = gimple_call_arg (stmt, 3);
2325 if (!tree_fits_uhwi_p (destsize))
2326 return false;
2327 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2329 /* Check whether the format is a literal string constant. */
2330 fmt_str = c_getstr (fmt);
2331 if (fmt_str == NULL)
2332 return false;
2334 if (!init_target_chars ())
2335 return false;
2337 /* If the format doesn't contain % args or %%, use strcpy. */
2338 if (strchr (fmt_str, target_percent) == NULL)
2340 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2341 if (!fn)
2342 return false;
2344 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2345 if (orig)
2346 return false;
2348 /* We could expand this as
2349 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2350 or to
2351 memcpy (str, fmt_with_nul_at_cstm1, cst);
2352 but in the former case that might increase code size
2353 and in the latter case grow .rodata section too much.
2354 So punt for now. */
2355 size_t len = strlen (fmt_str);
2356 if (len >= destlen)
2357 return false;
2359 gimple_seq stmts = NULL;
2360 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2361 gimple_seq_add_stmt_without_update (&stmts, repl);
2362 if (gimple_call_lhs (stmt))
2364 repl = gimple_build_assign (gimple_call_lhs (stmt),
2365 build_int_cst (integer_type_node, len));
2366 gimple_seq_add_stmt_without_update (&stmts, repl);
2367 gsi_replace_with_seq_vops (gsi, stmts);
2368 /* gsi now points at the assignment to the lhs, get a
2369 stmt iterator to the memcpy call.
2370 ??? We can't use gsi_for_stmt as that doesn't work when the
2371 CFG isn't built yet. */
2372 gimple_stmt_iterator gsi2 = *gsi;
2373 gsi_prev (&gsi2);
2374 fold_stmt (&gsi2);
2376 else
2378 gsi_replace_with_seq_vops (gsi, stmts);
2379 fold_stmt (gsi);
2381 return true;
2384 /* If the format is "%s", use strcpy if the result isn't used. */
2385 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2387 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2388 if (!fn)
2389 return false;
2391 /* Don't crash on snprintf (str1, cst, "%s"). */
2392 if (!orig)
2393 return false;
2395 tree orig_len = get_maxval_strlen (orig, 0);
2396 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2397 return false;
2399 /* We could expand this as
2400 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2401 or to
2402 memcpy (str1, str2_with_nul_at_cstm1, cst);
2403 but in the former case that might increase code size
2404 and in the latter case grow .rodata section too much.
2405 So punt for now. */
2406 if (compare_tree_int (orig_len, destlen) >= 0)
2407 return false;
2409 /* Convert snprintf (str1, cst, "%s", str2) into
2410 strcpy (str1, str2) if strlen (str2) < cst. */
2411 gimple_seq stmts = NULL;
2412 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2413 gimple_seq_add_stmt_without_update (&stmts, repl);
2414 if (gimple_call_lhs (stmt))
2416 if (!useless_type_conversion_p (integer_type_node,
2417 TREE_TYPE (orig_len)))
2418 orig_len = fold_convert (integer_type_node, orig_len);
2419 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2420 gimple_seq_add_stmt_without_update (&stmts, repl);
2421 gsi_replace_with_seq_vops (gsi, stmts);
2422 /* gsi now points at the assignment to the lhs, get a
2423 stmt iterator to the memcpy call.
2424 ??? We can't use gsi_for_stmt as that doesn't work when the
2425 CFG isn't built yet. */
2426 gimple_stmt_iterator gsi2 = *gsi;
2427 gsi_prev (&gsi2);
2428 fold_stmt (&gsi2);
2430 else
2432 gsi_replace_with_seq_vops (gsi, stmts);
2433 fold_stmt (gsi);
2435 return true;
2437 return false;
2440 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2441 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2442 more than 3 arguments, and ARG may be null in the 2-argument case.
2444 Return NULL_TREE if no simplification was possible, otherwise return the
2445 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2446 code of the function to be simplified. */
2448 static bool
2449 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2450 tree fp, tree fmt, tree arg,
2451 enum built_in_function fcode)
2453 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2454 tree fn_fputc, fn_fputs;
2455 const char *fmt_str = NULL;
2457 /* If the return value is used, don't do the transformation. */
2458 if (gimple_call_lhs (stmt) != NULL_TREE)
2459 return false;
2461 /* Check whether the format is a literal string constant. */
2462 fmt_str = c_getstr (fmt);
2463 if (fmt_str == NULL)
2464 return false;
2466 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2468 /* If we're using an unlocked function, assume the other
2469 unlocked functions exist explicitly. */
2470 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2471 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2473 else
2475 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2476 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2479 if (!init_target_chars ())
2480 return false;
2482 /* If the format doesn't contain % args or %%, use strcpy. */
2483 if (strchr (fmt_str, target_percent) == NULL)
2485 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2486 && arg)
2487 return false;
2489 /* If the format specifier was "", fprintf does nothing. */
2490 if (fmt_str[0] == '\0')
2492 replace_call_with_value (gsi, NULL_TREE);
2493 return true;
2496 /* When "string" doesn't contain %, replace all cases of
2497 fprintf (fp, string) with fputs (string, fp). The fputs
2498 builtin will take care of special cases like length == 1. */
2499 if (fn_fputs)
2501 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2502 replace_call_with_call_and_fold (gsi, repl);
2503 return true;
2507 /* The other optimizations can be done only on the non-va_list variants. */
2508 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2509 return false;
2511 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2512 else if (strcmp (fmt_str, target_percent_s) == 0)
2514 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2515 return false;
2516 if (fn_fputs)
2518 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2519 replace_call_with_call_and_fold (gsi, repl);
2520 return true;
2524 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2525 else if (strcmp (fmt_str, target_percent_c) == 0)
2527 if (!arg
2528 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2529 return false;
2530 if (fn_fputc)
2532 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2533 replace_call_with_call_and_fold (gsi, repl);
2534 return true;
2538 return false;
2541 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2542 FMT and ARG are the arguments to the call; we don't fold cases with
2543 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2545 Return NULL_TREE if no simplification was possible, otherwise return the
2546 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2547 code of the function to be simplified. */
2549 static bool
2550 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2551 tree arg, enum built_in_function fcode)
2553 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2554 tree fn_putchar, fn_puts, newarg;
2555 const char *fmt_str = NULL;
2557 /* If the return value is used, don't do the transformation. */
2558 if (gimple_call_lhs (stmt) != NULL_TREE)
2559 return false;
2561 /* Check whether the format is a literal string constant. */
2562 fmt_str = c_getstr (fmt);
2563 if (fmt_str == NULL)
2564 return false;
2566 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2568 /* If we're using an unlocked function, assume the other
2569 unlocked functions exist explicitly. */
2570 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2571 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2573 else
2575 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2576 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2579 if (!init_target_chars ())
2580 return false;
2582 if (strcmp (fmt_str, target_percent_s) == 0
2583 || strchr (fmt_str, target_percent) == NULL)
2585 const char *str;
2587 if (strcmp (fmt_str, target_percent_s) == 0)
2589 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2590 return false;
2592 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2593 return false;
2595 str = c_getstr (arg);
2596 if (str == NULL)
2597 return false;
2599 else
2601 /* The format specifier doesn't contain any '%' characters. */
2602 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2603 && arg)
2604 return false;
2605 str = fmt_str;
2608 /* If the string was "", printf does nothing. */
2609 if (str[0] == '\0')
2611 replace_call_with_value (gsi, NULL_TREE);
2612 return true;
2615 /* If the string has length of 1, call putchar. */
2616 if (str[1] == '\0')
2618 /* Given printf("c"), (where c is any one character,)
2619 convert "c"[0] to an int and pass that to the replacement
2620 function. */
2621 newarg = build_int_cst (integer_type_node, str[0]);
2622 if (fn_putchar)
2624 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2625 replace_call_with_call_and_fold (gsi, repl);
2626 return true;
2629 else
2631 /* If the string was "string\n", call puts("string"). */
2632 size_t len = strlen (str);
2633 if ((unsigned char)str[len - 1] == target_newline
2634 && (size_t) (int) len == len
2635 && (int) len > 0)
2637 char *newstr;
2638 tree offset_node, string_cst;
2640 /* Create a NUL-terminated string that's one char shorter
2641 than the original, stripping off the trailing '\n'. */
2642 newarg = build_string_literal (len, str);
2643 string_cst = string_constant (newarg, &offset_node);
2644 gcc_checking_assert (string_cst
2645 && (TREE_STRING_LENGTH (string_cst)
2646 == (int) len)
2647 && integer_zerop (offset_node)
2648 && (unsigned char)
2649 TREE_STRING_POINTER (string_cst)[len - 1]
2650 == target_newline);
2651 /* build_string_literal creates a new STRING_CST,
2652 modify it in place to avoid double copying. */
2653 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2654 newstr[len - 1] = '\0';
2655 if (fn_puts)
2657 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2658 replace_call_with_call_and_fold (gsi, repl);
2659 return true;
2662 else
2663 /* We'd like to arrange to call fputs(string,stdout) here,
2664 but we need stdout and don't have a way to get it yet. */
2665 return false;
2669 /* The other optimizations can be done only on the non-va_list variants. */
2670 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2671 return false;
2673 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2674 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2676 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2677 return false;
2678 if (fn_puts)
2680 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2681 replace_call_with_call_and_fold (gsi, repl);
2682 return true;
2686 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2687 else if (strcmp (fmt_str, target_percent_c) == 0)
2689 if (!arg || ! useless_type_conversion_p (integer_type_node,
2690 TREE_TYPE (arg)))
2691 return false;
2692 if (fn_putchar)
2694 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2695 replace_call_with_call_and_fold (gsi, repl);
2696 return true;
2700 return false;
2705 /* Fold a call to __builtin_strlen with known length LEN. */
2707 static bool
2708 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2710 gimple *stmt = gsi_stmt (*gsi);
2711 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2712 if (!len)
2713 return false;
2714 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2715 replace_call_with_value (gsi, len);
2716 return true;
2719 /* Fold a call to __builtin_acc_on_device. */
2721 static bool
2722 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2724 /* Defer folding until we know which compiler we're in. */
2725 if (symtab->state != EXPANSION)
2726 return false;
2728 unsigned val_host = GOMP_DEVICE_HOST;
2729 unsigned val_dev = GOMP_DEVICE_NONE;
2731 #ifdef ACCEL_COMPILER
2732 val_host = GOMP_DEVICE_NOT_HOST;
2733 val_dev = ACCEL_COMPILER_acc_device;
2734 #endif
2736 location_t loc = gimple_location (gsi_stmt (*gsi));
2738 tree host_eq = make_ssa_name (boolean_type_node);
2739 gimple *host_ass = gimple_build_assign
2740 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2741 gimple_set_location (host_ass, loc);
2742 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2744 tree dev_eq = make_ssa_name (boolean_type_node);
2745 gimple *dev_ass = gimple_build_assign
2746 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2747 gimple_set_location (dev_ass, loc);
2748 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2750 tree result = make_ssa_name (boolean_type_node);
2751 gimple *result_ass = gimple_build_assign
2752 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2753 gimple_set_location (result_ass, loc);
2754 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2756 replace_call_with_value (gsi, result);
2758 return true;
2761 /* Fold the non-target builtin at *GSI and return whether any simplification
2762 was made. */
2764 static bool
2765 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2767 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2768 tree callee = gimple_call_fndecl (stmt);
2770 /* Give up for always_inline inline builtins until they are
2771 inlined. */
2772 if (avoid_folding_inline_builtin (callee))
2773 return false;
2775 unsigned n = gimple_call_num_args (stmt);
2776 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2777 switch (fcode)
2779 case BUILT_IN_BZERO:
2780 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2781 gimple_call_arg (stmt, 1));
2782 case BUILT_IN_MEMSET:
2783 return gimple_fold_builtin_memset (gsi,
2784 gimple_call_arg (stmt, 1),
2785 gimple_call_arg (stmt, 2));
2786 case BUILT_IN_BCOPY:
2787 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2788 gimple_call_arg (stmt, 0), 3);
2789 case BUILT_IN_MEMCPY:
2790 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2791 gimple_call_arg (stmt, 1), 0);
2792 case BUILT_IN_MEMPCPY:
2793 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2794 gimple_call_arg (stmt, 1), 1);
2795 case BUILT_IN_MEMMOVE:
2796 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2797 gimple_call_arg (stmt, 1), 3);
2798 case BUILT_IN_SPRINTF_CHK:
2799 case BUILT_IN_VSPRINTF_CHK:
2800 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2801 case BUILT_IN_STRCAT_CHK:
2802 return gimple_fold_builtin_strcat_chk (gsi);
2803 case BUILT_IN_STRNCAT_CHK:
2804 return gimple_fold_builtin_strncat_chk (gsi);
2805 case BUILT_IN_STRLEN:
2806 return gimple_fold_builtin_strlen (gsi);
2807 case BUILT_IN_STRCPY:
2808 return gimple_fold_builtin_strcpy (gsi,
2809 gimple_call_arg (stmt, 0),
2810 gimple_call_arg (stmt, 1));
2811 case BUILT_IN_STRNCPY:
2812 return gimple_fold_builtin_strncpy (gsi,
2813 gimple_call_arg (stmt, 0),
2814 gimple_call_arg (stmt, 1),
2815 gimple_call_arg (stmt, 2));
2816 case BUILT_IN_STRCAT:
2817 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2818 gimple_call_arg (stmt, 1));
2819 case BUILT_IN_STRNCAT:
2820 return gimple_fold_builtin_strncat (gsi);
2821 case BUILT_IN_FPUTS:
2822 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2823 gimple_call_arg (stmt, 1), false);
2824 case BUILT_IN_FPUTS_UNLOCKED:
2825 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2826 gimple_call_arg (stmt, 1), true);
2827 case BUILT_IN_MEMCPY_CHK:
2828 case BUILT_IN_MEMPCPY_CHK:
2829 case BUILT_IN_MEMMOVE_CHK:
2830 case BUILT_IN_MEMSET_CHK:
2831 return gimple_fold_builtin_memory_chk (gsi,
2832 gimple_call_arg (stmt, 0),
2833 gimple_call_arg (stmt, 1),
2834 gimple_call_arg (stmt, 2),
2835 gimple_call_arg (stmt, 3),
2836 fcode);
2837 case BUILT_IN_STPCPY:
2838 return gimple_fold_builtin_stpcpy (gsi);
2839 case BUILT_IN_STRCPY_CHK:
2840 case BUILT_IN_STPCPY_CHK:
2841 return gimple_fold_builtin_stxcpy_chk (gsi,
2842 gimple_call_arg (stmt, 0),
2843 gimple_call_arg (stmt, 1),
2844 gimple_call_arg (stmt, 2),
2845 fcode);
2846 case BUILT_IN_STRNCPY_CHK:
2847 case BUILT_IN_STPNCPY_CHK:
2848 return gimple_fold_builtin_stxncpy_chk (gsi,
2849 gimple_call_arg (stmt, 0),
2850 gimple_call_arg (stmt, 1),
2851 gimple_call_arg (stmt, 2),
2852 gimple_call_arg (stmt, 3),
2853 fcode);
2854 case BUILT_IN_SNPRINTF_CHK:
2855 case BUILT_IN_VSNPRINTF_CHK:
2856 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2857 case BUILT_IN_SNPRINTF:
2858 return gimple_fold_builtin_snprintf (gsi);
2859 case BUILT_IN_SPRINTF:
2860 return gimple_fold_builtin_sprintf (gsi);
2861 case BUILT_IN_FPRINTF:
2862 case BUILT_IN_FPRINTF_UNLOCKED:
2863 case BUILT_IN_VFPRINTF:
2864 if (n == 2 || n == 3)
2865 return gimple_fold_builtin_fprintf (gsi,
2866 gimple_call_arg (stmt, 0),
2867 gimple_call_arg (stmt, 1),
2868 n == 3
2869 ? gimple_call_arg (stmt, 2)
2870 : NULL_TREE,
2871 fcode);
2872 break;
2873 case BUILT_IN_FPRINTF_CHK:
2874 case BUILT_IN_VFPRINTF_CHK:
2875 if (n == 3 || n == 4)
2876 return gimple_fold_builtin_fprintf (gsi,
2877 gimple_call_arg (stmt, 0),
2878 gimple_call_arg (stmt, 2),
2879 n == 4
2880 ? gimple_call_arg (stmt, 3)
2881 : NULL_TREE,
2882 fcode);
2883 break;
2884 case BUILT_IN_PRINTF:
2885 case BUILT_IN_PRINTF_UNLOCKED:
2886 case BUILT_IN_VPRINTF:
2887 if (n == 1 || n == 2)
2888 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2889 n == 2
2890 ? gimple_call_arg (stmt, 1)
2891 : NULL_TREE, fcode);
2892 break;
2893 case BUILT_IN_PRINTF_CHK:
2894 case BUILT_IN_VPRINTF_CHK:
2895 if (n == 2 || n == 3)
2896 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2897 n == 3
2898 ? gimple_call_arg (stmt, 2)
2899 : NULL_TREE, fcode);
2900 break;
2901 case BUILT_IN_ACC_ON_DEVICE:
2902 return gimple_fold_builtin_acc_on_device (gsi,
2903 gimple_call_arg (stmt, 0));
2904 default:;
2907 /* Try the generic builtin folder. */
2908 bool ignore = (gimple_call_lhs (stmt) == NULL);
2909 tree result = fold_call_stmt (stmt, ignore);
2910 if (result)
2912 if (ignore)
2913 STRIP_NOPS (result);
2914 else
2915 result = fold_convert (gimple_call_return_type (stmt), result);
2916 if (!update_call_from_tree (gsi, result))
2917 gimplify_and_update_call_from_tree (gsi, result);
2918 return true;
2921 return false;
2924 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
2925 function calls to constants, where possible. */
2927 static tree
2928 fold_internal_goacc_dim (const gimple *call)
2930 int axis = get_oacc_ifn_dim_arg (call);
2931 int size = get_oacc_fn_dim_size (current_function_decl, axis);
2932 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
2933 tree result = NULL_TREE;
2935 /* If the size is 1, or we only want the size and it is not dynamic,
2936 we know the answer. */
2937 if (size == 1 || (!is_pos && size))
2939 tree type = TREE_TYPE (gimple_call_lhs (call));
2940 result = build_int_cst (type, size - is_pos);
2943 return result;
2946 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2947 doesn't fit into TYPE. The test for overflow should be regardless of
2948 -fwrapv, and even for unsigned types. */
2950 bool
2951 arith_overflowed_p (enum tree_code code, const_tree type,
2952 const_tree arg0, const_tree arg1)
2954 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2955 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2956 widest2_int_cst;
2957 widest2_int warg0 = widest2_int_cst (arg0);
2958 widest2_int warg1 = widest2_int_cst (arg1);
2959 widest2_int wres;
2960 switch (code)
2962 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2963 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2964 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2965 default: gcc_unreachable ();
2967 signop sign = TYPE_SIGN (type);
2968 if (sign == UNSIGNED && wi::neg_p (wres))
2969 return true;
2970 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2973 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2974 The statement may be replaced by another statement, e.g., if the call
2975 simplifies to a constant value. Return true if any changes were made.
2976 It is assumed that the operands have been previously folded. */
2978 static bool
2979 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2981 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2982 tree callee;
2983 bool changed = false;
2984 unsigned i;
2986 /* Fold *& in call arguments. */
2987 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2988 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2990 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2991 if (tmp)
2993 gimple_call_set_arg (stmt, i, tmp);
2994 changed = true;
2998 /* Check for virtual calls that became direct calls. */
2999 callee = gimple_call_fn (stmt);
3000 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3002 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3004 if (dump_file && virtual_method_call_p (callee)
3005 && !possible_polymorphic_call_target_p
3006 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3007 (OBJ_TYPE_REF_EXPR (callee)))))
3009 fprintf (dump_file,
3010 "Type inheritance inconsistent devirtualization of ");
3011 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3012 fprintf (dump_file, " to ");
3013 print_generic_expr (dump_file, callee, TDF_SLIM);
3014 fprintf (dump_file, "\n");
3017 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3018 changed = true;
3020 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3022 bool final;
3023 vec <cgraph_node *>targets
3024 = possible_polymorphic_call_targets (callee, stmt, &final);
3025 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3027 tree lhs = gimple_call_lhs (stmt);
3028 if (dump_enabled_p ())
3030 location_t loc = gimple_location_safe (stmt);
3031 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3032 "folding virtual function call to %s\n",
3033 targets.length () == 1
3034 ? targets[0]->name ()
3035 : "__builtin_unreachable");
3037 if (targets.length () == 1)
3039 gimple_call_set_fndecl (stmt, targets[0]->decl);
3040 changed = true;
3041 /* If the call becomes noreturn, remove the lhs. */
3042 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3044 if (TREE_CODE (lhs) == SSA_NAME)
3046 tree var = create_tmp_var (TREE_TYPE (lhs));
3047 tree def = get_or_create_ssa_default_def (cfun, var);
3048 gimple *new_stmt = gimple_build_assign (lhs, def);
3049 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3051 gimple_call_set_lhs (stmt, NULL_TREE);
3053 maybe_remove_unused_call_args (cfun, stmt);
3055 else
3057 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3058 gimple *new_stmt = gimple_build_call (fndecl, 0);
3059 gimple_set_location (new_stmt, gimple_location (stmt));
3060 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3062 tree var = create_tmp_var (TREE_TYPE (lhs));
3063 tree def = get_or_create_ssa_default_def (cfun, var);
3065 /* To satisfy condition for
3066 cgraph_update_edges_for_call_stmt_node,
3067 we need to preserve GIMPLE_CALL statement
3068 at position of GSI iterator. */
3069 update_call_from_tree (gsi, def);
3070 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3072 else
3074 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3075 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3076 gsi_replace (gsi, new_stmt, false);
3078 return true;
3084 /* Check for indirect calls that became direct calls, and then
3085 no longer require a static chain. */
3086 if (gimple_call_chain (stmt))
3088 tree fn = gimple_call_fndecl (stmt);
3089 if (fn && !DECL_STATIC_CHAIN (fn))
3091 gimple_call_set_chain (stmt, NULL);
3092 changed = true;
3094 else
3096 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3097 if (tmp)
3099 gimple_call_set_chain (stmt, tmp);
3100 changed = true;
3105 if (inplace)
3106 return changed;
3108 /* Check for builtins that CCP can handle using information not
3109 available in the generic fold routines. */
3110 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3112 if (gimple_fold_builtin (gsi))
3113 changed = true;
3115 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3117 changed |= targetm.gimple_fold_builtin (gsi);
3119 else if (gimple_call_internal_p (stmt))
3121 enum tree_code subcode = ERROR_MARK;
3122 tree result = NULL_TREE;
3123 bool cplx_result = false;
3124 tree overflow = NULL_TREE;
3125 switch (gimple_call_internal_fn (stmt))
3127 case IFN_BUILTIN_EXPECT:
3128 result = fold_builtin_expect (gimple_location (stmt),
3129 gimple_call_arg (stmt, 0),
3130 gimple_call_arg (stmt, 1),
3131 gimple_call_arg (stmt, 2));
3132 break;
3133 case IFN_UBSAN_OBJECT_SIZE:
3134 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3135 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3136 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3137 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3138 gimple_call_arg (stmt, 2))))
3140 gsi_replace (gsi, gimple_build_nop (), false);
3141 unlink_stmt_vdef (stmt);
3142 release_defs (stmt);
3143 return true;
3145 break;
3146 case IFN_GOACC_DIM_SIZE:
3147 case IFN_GOACC_DIM_POS:
3148 result = fold_internal_goacc_dim (stmt);
3149 break;
3150 case IFN_UBSAN_CHECK_ADD:
3151 subcode = PLUS_EXPR;
3152 break;
3153 case IFN_UBSAN_CHECK_SUB:
3154 subcode = MINUS_EXPR;
3155 break;
3156 case IFN_UBSAN_CHECK_MUL:
3157 subcode = MULT_EXPR;
3158 break;
3159 case IFN_ADD_OVERFLOW:
3160 subcode = PLUS_EXPR;
3161 cplx_result = true;
3162 break;
3163 case IFN_SUB_OVERFLOW:
3164 subcode = MINUS_EXPR;
3165 cplx_result = true;
3166 break;
3167 case IFN_MUL_OVERFLOW:
3168 subcode = MULT_EXPR;
3169 cplx_result = true;
3170 break;
3171 default:
3172 break;
3174 if (subcode != ERROR_MARK)
3176 tree arg0 = gimple_call_arg (stmt, 0);
3177 tree arg1 = gimple_call_arg (stmt, 1);
3178 tree type = TREE_TYPE (arg0);
3179 if (cplx_result)
3181 tree lhs = gimple_call_lhs (stmt);
3182 if (lhs == NULL_TREE)
3183 type = NULL_TREE;
3184 else
3185 type = TREE_TYPE (TREE_TYPE (lhs));
3187 if (type == NULL_TREE)
3189 /* x = y + 0; x = y - 0; x = y * 0; */
3190 else if (integer_zerop (arg1))
3191 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3192 /* x = 0 + y; x = 0 * y; */
3193 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3194 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3195 /* x = y - y; */
3196 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3197 result = integer_zero_node;
3198 /* x = y * 1; x = 1 * y; */
3199 else if (subcode == MULT_EXPR && integer_onep (arg1))
3200 result = arg0;
3201 else if (subcode == MULT_EXPR && integer_onep (arg0))
3202 result = arg1;
3203 else if (TREE_CODE (arg0) == INTEGER_CST
3204 && TREE_CODE (arg1) == INTEGER_CST)
3206 if (cplx_result)
3207 result = int_const_binop (subcode, fold_convert (type, arg0),
3208 fold_convert (type, arg1));
3209 else
3210 result = int_const_binop (subcode, arg0, arg1);
3211 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3213 if (cplx_result)
3214 overflow = build_one_cst (type);
3215 else
3216 result = NULL_TREE;
3219 if (result)
3221 if (result == integer_zero_node)
3222 result = build_zero_cst (type);
3223 else if (cplx_result && TREE_TYPE (result) != type)
3225 if (TREE_CODE (result) == INTEGER_CST)
3227 if (arith_overflowed_p (PLUS_EXPR, type, result,
3228 integer_zero_node))
3229 overflow = build_one_cst (type);
3231 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3232 && TYPE_UNSIGNED (type))
3233 || (TYPE_PRECISION (type)
3234 < (TYPE_PRECISION (TREE_TYPE (result))
3235 + (TYPE_UNSIGNED (TREE_TYPE (result))
3236 && !TYPE_UNSIGNED (type)))))
3237 result = NULL_TREE;
3238 if (result)
3239 result = fold_convert (type, result);
3244 if (result)
3246 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3247 result = drop_tree_overflow (result);
3248 if (cplx_result)
3250 if (overflow == NULL_TREE)
3251 overflow = build_zero_cst (TREE_TYPE (result));
3252 tree ctype = build_complex_type (TREE_TYPE (result));
3253 if (TREE_CODE (result) == INTEGER_CST
3254 && TREE_CODE (overflow) == INTEGER_CST)
3255 result = build_complex (ctype, result, overflow);
3256 else
3257 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3258 ctype, result, overflow);
3260 if (!update_call_from_tree (gsi, result))
3261 gimplify_and_update_call_from_tree (gsi, result);
3262 changed = true;
3266 return changed;
3270 /* Return true whether NAME has a use on STMT. */
3272 static bool
3273 has_use_on_stmt (tree name, gimple *stmt)
3275 imm_use_iterator iter;
3276 use_operand_p use_p;
3277 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3278 if (USE_STMT (use_p) == stmt)
3279 return true;
3280 return false;
3283 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3284 gimple_simplify.
3286 Replaces *GSI with the simplification result in RCODE and OPS
3287 and the associated statements in *SEQ. Does the replacement
3288 according to INPLACE and returns true if the operation succeeded. */
3290 static bool
3291 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3292 code_helper rcode, tree *ops,
3293 gimple_seq *seq, bool inplace)
3295 gimple *stmt = gsi_stmt (*gsi);
3297 /* Play safe and do not allow abnormals to be mentioned in
3298 newly created statements. See also maybe_push_res_to_seq.
3299 As an exception allow such uses if there was a use of the
3300 same SSA name on the old stmt. */
3301 if ((TREE_CODE (ops[0]) == SSA_NAME
3302 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3303 && !has_use_on_stmt (ops[0], stmt))
3304 || (ops[1]
3305 && TREE_CODE (ops[1]) == SSA_NAME
3306 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3307 && !has_use_on_stmt (ops[1], stmt))
3308 || (ops[2]
3309 && TREE_CODE (ops[2]) == SSA_NAME
3310 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3311 && !has_use_on_stmt (ops[2], stmt)))
3312 return false;
3314 /* Don't insert new statements when INPLACE is true, even if we could
3315 reuse STMT for the final statement. */
3316 if (inplace && !gimple_seq_empty_p (*seq))
3317 return false;
3319 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3321 gcc_assert (rcode.is_tree_code ());
3322 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3323 /* GIMPLE_CONDs condition may not throw. */
3324 && (!flag_exceptions
3325 || !cfun->can_throw_non_call_exceptions
3326 || !operation_could_trap_p (rcode,
3327 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3328 false, NULL_TREE)))
3329 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3330 else if (rcode == SSA_NAME)
3331 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3332 build_zero_cst (TREE_TYPE (ops[0])));
3333 else if (rcode == INTEGER_CST)
3335 if (integer_zerop (ops[0]))
3336 gimple_cond_make_false (cond_stmt);
3337 else
3338 gimple_cond_make_true (cond_stmt);
3340 else if (!inplace)
3342 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3343 ops, seq);
3344 if (!res)
3345 return false;
3346 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3347 build_zero_cst (TREE_TYPE (res)));
3349 else
3350 return false;
3351 if (dump_file && (dump_flags & TDF_DETAILS))
3353 fprintf (dump_file, "gimple_simplified to ");
3354 if (!gimple_seq_empty_p (*seq))
3355 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3356 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3357 0, TDF_SLIM);
3359 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3360 return true;
3362 else if (is_gimple_assign (stmt)
3363 && rcode.is_tree_code ())
3365 if (!inplace
3366 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3368 maybe_build_generic_op (rcode,
3369 TREE_TYPE (gimple_assign_lhs (stmt)),
3370 &ops[0], ops[1], ops[2]);
3371 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3372 if (dump_file && (dump_flags & TDF_DETAILS))
3374 fprintf (dump_file, "gimple_simplified to ");
3375 if (!gimple_seq_empty_p (*seq))
3376 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3377 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3378 0, TDF_SLIM);
3380 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3381 return true;
3384 else if (rcode.is_fn_code ()
3385 && gimple_call_combined_fn (stmt) == rcode)
3387 unsigned i;
3388 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3390 gcc_assert (ops[i] != NULL_TREE);
3391 gimple_call_set_arg (stmt, i, ops[i]);
3393 if (i < 3)
3394 gcc_assert (ops[i] == NULL_TREE);
3395 if (dump_file && (dump_flags & TDF_DETAILS))
3397 fprintf (dump_file, "gimple_simplified to ");
3398 if (!gimple_seq_empty_p (*seq))
3399 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3400 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
3402 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3403 return true;
3405 else if (!inplace)
3407 if (gimple_has_lhs (stmt))
3409 tree lhs = gimple_get_lhs (stmt);
3410 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3411 ops, seq, lhs))
3412 return false;
3413 if (dump_file && (dump_flags & TDF_DETAILS))
3415 fprintf (dump_file, "gimple_simplified to ");
3416 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3418 gsi_replace_with_seq_vops (gsi, *seq);
3419 return true;
3421 else
3422 gcc_unreachable ();
3425 return false;
3428 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3430 static bool
3431 maybe_canonicalize_mem_ref_addr (tree *t)
3433 bool res = false;
3435 if (TREE_CODE (*t) == ADDR_EXPR)
3436 t = &TREE_OPERAND (*t, 0);
3438 while (handled_component_p (*t))
3439 t = &TREE_OPERAND (*t, 0);
3441 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3442 of invariant addresses into a SSA name MEM_REF address. */
3443 if (TREE_CODE (*t) == MEM_REF
3444 || TREE_CODE (*t) == TARGET_MEM_REF)
3446 tree addr = TREE_OPERAND (*t, 0);
3447 if (TREE_CODE (addr) == ADDR_EXPR
3448 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3449 || handled_component_p (TREE_OPERAND (addr, 0))))
3451 tree base;
3452 HOST_WIDE_INT coffset;
3453 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3454 &coffset);
3455 if (!base)
3456 gcc_unreachable ();
3458 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3459 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3460 TREE_OPERAND (*t, 1),
3461 size_int (coffset));
3462 res = true;
3464 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3465 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3468 /* Canonicalize back MEM_REFs to plain reference trees if the object
3469 accessed is a decl that has the same access semantics as the MEM_REF. */
3470 if (TREE_CODE (*t) == MEM_REF
3471 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3472 && integer_zerop (TREE_OPERAND (*t, 1))
3473 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3475 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3476 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3477 if (/* Same volatile qualification. */
3478 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3479 /* Same TBAA behavior with -fstrict-aliasing. */
3480 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3481 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3482 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3483 /* Same alignment. */
3484 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3485 /* We have to look out here to not drop a required conversion
3486 from the rhs to the lhs if *t appears on the lhs or vice-versa
3487 if it appears on the rhs. Thus require strict type
3488 compatibility. */
3489 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3491 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3492 res = true;
3496 /* Canonicalize TARGET_MEM_REF in particular with respect to
3497 the indexes becoming constant. */
3498 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3500 tree tem = maybe_fold_tmr (*t);
3501 if (tem)
3503 *t = tem;
3504 res = true;
3508 return res;
3511 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3512 distinguishes both cases. */
3514 static bool
3515 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3517 bool changed = false;
3518 gimple *stmt = gsi_stmt (*gsi);
3519 unsigned i;
3521 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3522 after propagation.
3523 ??? This shouldn't be done in generic folding but in the
3524 propagation helpers which also know whether an address was
3525 propagated.
3526 Also canonicalize operand order. */
3527 switch (gimple_code (stmt))
3529 case GIMPLE_ASSIGN:
3530 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3532 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3533 if ((REFERENCE_CLASS_P (*rhs)
3534 || TREE_CODE (*rhs) == ADDR_EXPR)
3535 && maybe_canonicalize_mem_ref_addr (rhs))
3536 changed = true;
3537 tree *lhs = gimple_assign_lhs_ptr (stmt);
3538 if (REFERENCE_CLASS_P (*lhs)
3539 && maybe_canonicalize_mem_ref_addr (lhs))
3540 changed = true;
3542 else
3544 /* Canonicalize operand order. */
3545 enum tree_code code = gimple_assign_rhs_code (stmt);
3546 if (TREE_CODE_CLASS (code) == tcc_comparison
3547 || commutative_tree_code (code)
3548 || commutative_ternary_tree_code (code))
3550 tree rhs1 = gimple_assign_rhs1 (stmt);
3551 tree rhs2 = gimple_assign_rhs2 (stmt);
3552 if (tree_swap_operands_p (rhs1, rhs2, false))
3554 gimple_assign_set_rhs1 (stmt, rhs2);
3555 gimple_assign_set_rhs2 (stmt, rhs1);
3556 if (TREE_CODE_CLASS (code) == tcc_comparison)
3557 gimple_assign_set_rhs_code (stmt,
3558 swap_tree_comparison (code));
3559 changed = true;
3563 break;
3564 case GIMPLE_CALL:
3566 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3568 tree *arg = gimple_call_arg_ptr (stmt, i);
3569 if (REFERENCE_CLASS_P (*arg)
3570 && maybe_canonicalize_mem_ref_addr (arg))
3571 changed = true;
3573 tree *lhs = gimple_call_lhs_ptr (stmt);
3574 if (*lhs
3575 && REFERENCE_CLASS_P (*lhs)
3576 && maybe_canonicalize_mem_ref_addr (lhs))
3577 changed = true;
3578 break;
3580 case GIMPLE_ASM:
3582 gasm *asm_stmt = as_a <gasm *> (stmt);
3583 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3585 tree link = gimple_asm_output_op (asm_stmt, i);
3586 tree op = TREE_VALUE (link);
3587 if (REFERENCE_CLASS_P (op)
3588 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3589 changed = true;
3591 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3593 tree link = gimple_asm_input_op (asm_stmt, i);
3594 tree op = TREE_VALUE (link);
3595 if ((REFERENCE_CLASS_P (op)
3596 || TREE_CODE (op) == ADDR_EXPR)
3597 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3598 changed = true;
3601 break;
3602 case GIMPLE_DEBUG:
3603 if (gimple_debug_bind_p (stmt))
3605 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3606 if (*val
3607 && (REFERENCE_CLASS_P (*val)
3608 || TREE_CODE (*val) == ADDR_EXPR)
3609 && maybe_canonicalize_mem_ref_addr (val))
3610 changed = true;
3612 break;
3613 case GIMPLE_COND:
3615 /* Canonicalize operand order. */
3616 tree lhs = gimple_cond_lhs (stmt);
3617 tree rhs = gimple_cond_rhs (stmt);
3618 if (tree_swap_operands_p (lhs, rhs, false))
3620 gcond *gc = as_a <gcond *> (stmt);
3621 gimple_cond_set_lhs (gc, rhs);
3622 gimple_cond_set_rhs (gc, lhs);
3623 gimple_cond_set_code (gc,
3624 swap_tree_comparison (gimple_cond_code (gc)));
3625 changed = true;
3628 default:;
3631 /* Dispatch to pattern-based folding. */
3632 if (!inplace
3633 || is_gimple_assign (stmt)
3634 || gimple_code (stmt) == GIMPLE_COND)
3636 gimple_seq seq = NULL;
3637 code_helper rcode;
3638 tree ops[3] = {};
3639 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3640 valueize, valueize))
3642 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3643 changed = true;
3644 else
3645 gimple_seq_discard (seq);
3649 stmt = gsi_stmt (*gsi);
3651 /* Fold the main computation performed by the statement. */
3652 switch (gimple_code (stmt))
3654 case GIMPLE_ASSIGN:
3656 /* Try to canonicalize for boolean-typed X the comparisons
3657 X == 0, X == 1, X != 0, and X != 1. */
3658 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3659 || gimple_assign_rhs_code (stmt) == NE_EXPR)
3661 tree lhs = gimple_assign_lhs (stmt);
3662 tree op1 = gimple_assign_rhs1 (stmt);
3663 tree op2 = gimple_assign_rhs2 (stmt);
3664 tree type = TREE_TYPE (op1);
3666 /* Check whether the comparison operands are of the same boolean
3667 type as the result type is.
3668 Check that second operand is an integer-constant with value
3669 one or zero. */
3670 if (TREE_CODE (op2) == INTEGER_CST
3671 && (integer_zerop (op2) || integer_onep (op2))
3672 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3674 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3675 bool is_logical_not = false;
3677 /* X == 0 and X != 1 is a logical-not.of X
3678 X == 1 and X != 0 is X */
3679 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
3680 || (cmp_code == NE_EXPR && integer_onep (op2)))
3681 is_logical_not = true;
3683 if (is_logical_not == false)
3684 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
3685 /* Only for one-bit precision typed X the transformation
3686 !X -> ~X is valied. */
3687 else if (TYPE_PRECISION (type) == 1)
3688 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
3689 /* Otherwise we use !X -> X ^ 1. */
3690 else
3691 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
3692 build_int_cst (type, 1));
3693 changed = true;
3694 break;
3698 unsigned old_num_ops = gimple_num_ops (stmt);
3699 tree lhs = gimple_assign_lhs (stmt);
3700 tree new_rhs = fold_gimple_assign (gsi);
3701 if (new_rhs
3702 && !useless_type_conversion_p (TREE_TYPE (lhs),
3703 TREE_TYPE (new_rhs)))
3704 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3705 if (new_rhs
3706 && (!inplace
3707 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3709 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3710 changed = true;
3712 break;
3715 case GIMPLE_CALL:
3716 changed |= gimple_fold_call (gsi, inplace);
3717 break;
3719 case GIMPLE_ASM:
3720 /* Fold *& in asm operands. */
3722 gasm *asm_stmt = as_a <gasm *> (stmt);
3723 size_t noutputs;
3724 const char **oconstraints;
3725 const char *constraint;
3726 bool allows_mem, allows_reg;
3728 noutputs = gimple_asm_noutputs (asm_stmt);
3729 oconstraints = XALLOCAVEC (const char *, noutputs);
3731 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3733 tree link = gimple_asm_output_op (asm_stmt, i);
3734 tree op = TREE_VALUE (link);
3735 oconstraints[i]
3736 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3737 if (REFERENCE_CLASS_P (op)
3738 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3740 TREE_VALUE (link) = op;
3741 changed = true;
3744 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3746 tree link = gimple_asm_input_op (asm_stmt, i);
3747 tree op = TREE_VALUE (link);
3748 constraint
3749 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3750 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3751 oconstraints, &allows_mem, &allows_reg);
3752 if (REFERENCE_CLASS_P (op)
3753 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3754 != NULL_TREE)
3756 TREE_VALUE (link) = op;
3757 changed = true;
3761 break;
3763 case GIMPLE_DEBUG:
3764 if (gimple_debug_bind_p (stmt))
3766 tree val = gimple_debug_bind_get_value (stmt);
3767 if (val
3768 && REFERENCE_CLASS_P (val))
3770 tree tem = maybe_fold_reference (val, false);
3771 if (tem)
3773 gimple_debug_bind_set_value (stmt, tem);
3774 changed = true;
3777 else if (val
3778 && TREE_CODE (val) == ADDR_EXPR)
3780 tree ref = TREE_OPERAND (val, 0);
3781 tree tem = maybe_fold_reference (ref, false);
3782 if (tem)
3784 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3785 gimple_debug_bind_set_value (stmt, tem);
3786 changed = true;
3790 break;
3792 default:;
3795 stmt = gsi_stmt (*gsi);
3797 /* Fold *& on the lhs. */
3798 if (gimple_has_lhs (stmt))
3800 tree lhs = gimple_get_lhs (stmt);
3801 if (lhs && REFERENCE_CLASS_P (lhs))
3803 tree new_lhs = maybe_fold_reference (lhs, true);
3804 if (new_lhs)
3806 gimple_set_lhs (stmt, new_lhs);
3807 changed = true;
3812 return changed;
3815 /* Valueziation callback that ends up not following SSA edges. */
3817 tree
3818 no_follow_ssa_edges (tree)
3820 return NULL_TREE;
3823 /* Valueization callback that ends up following single-use SSA edges only. */
3825 tree
3826 follow_single_use_edges (tree val)
3828 if (TREE_CODE (val) == SSA_NAME
3829 && !has_single_use (val))
3830 return NULL_TREE;
3831 return val;
3834 /* Fold the statement pointed to by GSI. In some cases, this function may
3835 replace the whole statement with a new one. Returns true iff folding
3836 makes any changes.
3837 The statement pointed to by GSI should be in valid gimple form but may
3838 be in unfolded state as resulting from for example constant propagation
3839 which can produce *&x = 0. */
3841 bool
3842 fold_stmt (gimple_stmt_iterator *gsi)
3844 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3847 bool
3848 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3850 return fold_stmt_1 (gsi, false, valueize);
3853 /* Perform the minimal folding on statement *GSI. Only operations like
3854 *&x created by constant propagation are handled. The statement cannot
3855 be replaced with a new one. Return true if the statement was
3856 changed, false otherwise.
3857 The statement *GSI should be in valid gimple form but may
3858 be in unfolded state as resulting from for example constant propagation
3859 which can produce *&x = 0. */
3861 bool
3862 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3864 gimple *stmt = gsi_stmt (*gsi);
3865 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3866 gcc_assert (gsi_stmt (*gsi) == stmt);
3867 return changed;
3870 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3871 if EXPR is null or we don't know how.
3872 If non-null, the result always has boolean type. */
3874 static tree
3875 canonicalize_bool (tree expr, bool invert)
3877 if (!expr)
3878 return NULL_TREE;
3879 else if (invert)
3881 if (integer_nonzerop (expr))
3882 return boolean_false_node;
3883 else if (integer_zerop (expr))
3884 return boolean_true_node;
3885 else if (TREE_CODE (expr) == SSA_NAME)
3886 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3887 build_int_cst (TREE_TYPE (expr), 0));
3888 else if (COMPARISON_CLASS_P (expr))
3889 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3890 boolean_type_node,
3891 TREE_OPERAND (expr, 0),
3892 TREE_OPERAND (expr, 1));
3893 else
3894 return NULL_TREE;
3896 else
3898 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3899 return expr;
3900 if (integer_nonzerop (expr))
3901 return boolean_true_node;
3902 else if (integer_zerop (expr))
3903 return boolean_false_node;
3904 else if (TREE_CODE (expr) == SSA_NAME)
3905 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3906 build_int_cst (TREE_TYPE (expr), 0));
3907 else if (COMPARISON_CLASS_P (expr))
3908 return fold_build2 (TREE_CODE (expr),
3909 boolean_type_node,
3910 TREE_OPERAND (expr, 0),
3911 TREE_OPERAND (expr, 1));
3912 else
3913 return NULL_TREE;
3917 /* Check to see if a boolean expression EXPR is logically equivalent to the
3918 comparison (OP1 CODE OP2). Check for various identities involving
3919 SSA_NAMEs. */
3921 static bool
3922 same_bool_comparison_p (const_tree expr, enum tree_code code,
3923 const_tree op1, const_tree op2)
3925 gimple *s;
3927 /* The obvious case. */
3928 if (TREE_CODE (expr) == code
3929 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3930 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3931 return true;
3933 /* Check for comparing (name, name != 0) and the case where expr
3934 is an SSA_NAME with a definition matching the comparison. */
3935 if (TREE_CODE (expr) == SSA_NAME
3936 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3938 if (operand_equal_p (expr, op1, 0))
3939 return ((code == NE_EXPR && integer_zerop (op2))
3940 || (code == EQ_EXPR && integer_nonzerop (op2)));
3941 s = SSA_NAME_DEF_STMT (expr);
3942 if (is_gimple_assign (s)
3943 && gimple_assign_rhs_code (s) == code
3944 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3945 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3946 return true;
3949 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3950 of name is a comparison, recurse. */
3951 if (TREE_CODE (op1) == SSA_NAME
3952 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3954 s = SSA_NAME_DEF_STMT (op1);
3955 if (is_gimple_assign (s)
3956 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3958 enum tree_code c = gimple_assign_rhs_code (s);
3959 if ((c == NE_EXPR && integer_zerop (op2))
3960 || (c == EQ_EXPR && integer_nonzerop (op2)))
3961 return same_bool_comparison_p (expr, c,
3962 gimple_assign_rhs1 (s),
3963 gimple_assign_rhs2 (s));
3964 if ((c == EQ_EXPR && integer_zerop (op2))
3965 || (c == NE_EXPR && integer_nonzerop (op2)))
3966 return same_bool_comparison_p (expr,
3967 invert_tree_comparison (c, false),
3968 gimple_assign_rhs1 (s),
3969 gimple_assign_rhs2 (s));
3972 return false;
3975 /* Check to see if two boolean expressions OP1 and OP2 are logically
3976 equivalent. */
3978 static bool
3979 same_bool_result_p (const_tree op1, const_tree op2)
3981 /* Simple cases first. */
3982 if (operand_equal_p (op1, op2, 0))
3983 return true;
3985 /* Check the cases where at least one of the operands is a comparison.
3986 These are a bit smarter than operand_equal_p in that they apply some
3987 identifies on SSA_NAMEs. */
3988 if (COMPARISON_CLASS_P (op2)
3989 && same_bool_comparison_p (op1, TREE_CODE (op2),
3990 TREE_OPERAND (op2, 0),
3991 TREE_OPERAND (op2, 1)))
3992 return true;
3993 if (COMPARISON_CLASS_P (op1)
3994 && same_bool_comparison_p (op2, TREE_CODE (op1),
3995 TREE_OPERAND (op1, 0),
3996 TREE_OPERAND (op1, 1)))
3997 return true;
3999 /* Default case. */
4000 return false;
4003 /* Forward declarations for some mutually recursive functions. */
4005 static tree
4006 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4007 enum tree_code code2, tree op2a, tree op2b);
4008 static tree
4009 and_var_with_comparison (tree var, bool invert,
4010 enum tree_code code2, tree op2a, tree op2b);
4011 static tree
4012 and_var_with_comparison_1 (gimple *stmt,
4013 enum tree_code code2, tree op2a, tree op2b);
4014 static tree
4015 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4016 enum tree_code code2, tree op2a, tree op2b);
4017 static tree
4018 or_var_with_comparison (tree var, bool invert,
4019 enum tree_code code2, tree op2a, tree op2b);
4020 static tree
4021 or_var_with_comparison_1 (gimple *stmt,
4022 enum tree_code code2, tree op2a, tree op2b);
4024 /* Helper function for and_comparisons_1: try to simplify the AND of the
4025 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4026 If INVERT is true, invert the value of the VAR before doing the AND.
4027 Return NULL_EXPR if we can't simplify this to a single expression. */
4029 static tree
4030 and_var_with_comparison (tree var, bool invert,
4031 enum tree_code code2, tree op2a, tree op2b)
4033 tree t;
4034 gimple *stmt = SSA_NAME_DEF_STMT (var);
4036 /* We can only deal with variables whose definitions are assignments. */
4037 if (!is_gimple_assign (stmt))
4038 return NULL_TREE;
4040 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4041 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4042 Then we only have to consider the simpler non-inverted cases. */
4043 if (invert)
4044 t = or_var_with_comparison_1 (stmt,
4045 invert_tree_comparison (code2, false),
4046 op2a, op2b);
4047 else
4048 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4049 return canonicalize_bool (t, invert);
4052 /* Try to simplify the AND of the ssa variable defined by the assignment
4053 STMT with the comparison specified by (OP2A CODE2 OP2B).
4054 Return NULL_EXPR if we can't simplify this to a single expression. */
4056 static tree
4057 and_var_with_comparison_1 (gimple *stmt,
4058 enum tree_code code2, tree op2a, tree op2b)
4060 tree var = gimple_assign_lhs (stmt);
4061 tree true_test_var = NULL_TREE;
4062 tree false_test_var = NULL_TREE;
4063 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4065 /* Check for identities like (var AND (var == 0)) => false. */
4066 if (TREE_CODE (op2a) == SSA_NAME
4067 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4069 if ((code2 == NE_EXPR && integer_zerop (op2b))
4070 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4072 true_test_var = op2a;
4073 if (var == true_test_var)
4074 return var;
4076 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4077 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4079 false_test_var = op2a;
4080 if (var == false_test_var)
4081 return boolean_false_node;
4085 /* If the definition is a comparison, recurse on it. */
4086 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4088 tree t = and_comparisons_1 (innercode,
4089 gimple_assign_rhs1 (stmt),
4090 gimple_assign_rhs2 (stmt),
4091 code2,
4092 op2a,
4093 op2b);
4094 if (t)
4095 return t;
4098 /* If the definition is an AND or OR expression, we may be able to
4099 simplify by reassociating. */
4100 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4101 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4103 tree inner1 = gimple_assign_rhs1 (stmt);
4104 tree inner2 = gimple_assign_rhs2 (stmt);
4105 gimple *s;
4106 tree t;
4107 tree partial = NULL_TREE;
4108 bool is_and = (innercode == BIT_AND_EXPR);
4110 /* Check for boolean identities that don't require recursive examination
4111 of inner1/inner2:
4112 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4113 inner1 AND (inner1 OR inner2) => inner1
4114 !inner1 AND (inner1 AND inner2) => false
4115 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4116 Likewise for similar cases involving inner2. */
4117 if (inner1 == true_test_var)
4118 return (is_and ? var : inner1);
4119 else if (inner2 == true_test_var)
4120 return (is_and ? var : inner2);
4121 else if (inner1 == false_test_var)
4122 return (is_and
4123 ? boolean_false_node
4124 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4125 else if (inner2 == false_test_var)
4126 return (is_and
4127 ? boolean_false_node
4128 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4130 /* Next, redistribute/reassociate the AND across the inner tests.
4131 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4132 if (TREE_CODE (inner1) == SSA_NAME
4133 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4134 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4135 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4136 gimple_assign_rhs1 (s),
4137 gimple_assign_rhs2 (s),
4138 code2, op2a, op2b)))
4140 /* Handle the AND case, where we are reassociating:
4141 (inner1 AND inner2) AND (op2a code2 op2b)
4142 => (t AND inner2)
4143 If the partial result t is a constant, we win. Otherwise
4144 continue on to try reassociating with the other inner test. */
4145 if (is_and)
4147 if (integer_onep (t))
4148 return inner2;
4149 else if (integer_zerop (t))
4150 return boolean_false_node;
4153 /* Handle the OR case, where we are redistributing:
4154 (inner1 OR inner2) AND (op2a code2 op2b)
4155 => (t OR (inner2 AND (op2a code2 op2b))) */
4156 else if (integer_onep (t))
4157 return boolean_true_node;
4159 /* Save partial result for later. */
4160 partial = t;
4163 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4164 if (TREE_CODE (inner2) == SSA_NAME
4165 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4166 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4167 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4168 gimple_assign_rhs1 (s),
4169 gimple_assign_rhs2 (s),
4170 code2, op2a, op2b)))
4172 /* Handle the AND case, where we are reassociating:
4173 (inner1 AND inner2) AND (op2a code2 op2b)
4174 => (inner1 AND t) */
4175 if (is_and)
4177 if (integer_onep (t))
4178 return inner1;
4179 else if (integer_zerop (t))
4180 return boolean_false_node;
4181 /* If both are the same, we can apply the identity
4182 (x AND x) == x. */
4183 else if (partial && same_bool_result_p (t, partial))
4184 return t;
4187 /* Handle the OR case. where we are redistributing:
4188 (inner1 OR inner2) AND (op2a code2 op2b)
4189 => (t OR (inner1 AND (op2a code2 op2b)))
4190 => (t OR partial) */
4191 else
4193 if (integer_onep (t))
4194 return boolean_true_node;
4195 else if (partial)
4197 /* We already got a simplification for the other
4198 operand to the redistributed OR expression. The
4199 interesting case is when at least one is false.
4200 Or, if both are the same, we can apply the identity
4201 (x OR x) == x. */
4202 if (integer_zerop (partial))
4203 return t;
4204 else if (integer_zerop (t))
4205 return partial;
4206 else if (same_bool_result_p (t, partial))
4207 return t;
4212 return NULL_TREE;
4215 /* Try to simplify the AND of two comparisons defined by
4216 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4217 If this can be done without constructing an intermediate value,
4218 return the resulting tree; otherwise NULL_TREE is returned.
4219 This function is deliberately asymmetric as it recurses on SSA_DEFs
4220 in the first comparison but not the second. */
4222 static tree
4223 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4224 enum tree_code code2, tree op2a, tree op2b)
4226 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4228 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4229 if (operand_equal_p (op1a, op2a, 0)
4230 && operand_equal_p (op1b, op2b, 0))
4232 /* Result will be either NULL_TREE, or a combined comparison. */
4233 tree t = combine_comparisons (UNKNOWN_LOCATION,
4234 TRUTH_ANDIF_EXPR, code1, code2,
4235 truth_type, op1a, op1b);
4236 if (t)
4237 return t;
4240 /* Likewise the swapped case of the above. */
4241 if (operand_equal_p (op1a, op2b, 0)
4242 && operand_equal_p (op1b, op2a, 0))
4244 /* Result will be either NULL_TREE, or a combined comparison. */
4245 tree t = combine_comparisons (UNKNOWN_LOCATION,
4246 TRUTH_ANDIF_EXPR, code1,
4247 swap_tree_comparison (code2),
4248 truth_type, op1a, op1b);
4249 if (t)
4250 return t;
4253 /* If both comparisons are of the same value against constants, we might
4254 be able to merge them. */
4255 if (operand_equal_p (op1a, op2a, 0)
4256 && TREE_CODE (op1b) == INTEGER_CST
4257 && TREE_CODE (op2b) == INTEGER_CST)
4259 int cmp = tree_int_cst_compare (op1b, op2b);
4261 /* If we have (op1a == op1b), we should either be able to
4262 return that or FALSE, depending on whether the constant op1b
4263 also satisfies the other comparison against op2b. */
4264 if (code1 == EQ_EXPR)
4266 bool done = true;
4267 bool val;
4268 switch (code2)
4270 case EQ_EXPR: val = (cmp == 0); break;
4271 case NE_EXPR: val = (cmp != 0); break;
4272 case LT_EXPR: val = (cmp < 0); break;
4273 case GT_EXPR: val = (cmp > 0); break;
4274 case LE_EXPR: val = (cmp <= 0); break;
4275 case GE_EXPR: val = (cmp >= 0); break;
4276 default: done = false;
4278 if (done)
4280 if (val)
4281 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4282 else
4283 return boolean_false_node;
4286 /* Likewise if the second comparison is an == comparison. */
4287 else if (code2 == EQ_EXPR)
4289 bool done = true;
4290 bool val;
4291 switch (code1)
4293 case EQ_EXPR: val = (cmp == 0); break;
4294 case NE_EXPR: val = (cmp != 0); break;
4295 case LT_EXPR: val = (cmp > 0); break;
4296 case GT_EXPR: val = (cmp < 0); break;
4297 case LE_EXPR: val = (cmp >= 0); break;
4298 case GE_EXPR: val = (cmp <= 0); break;
4299 default: done = false;
4301 if (done)
4303 if (val)
4304 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4305 else
4306 return boolean_false_node;
4310 /* Same business with inequality tests. */
4311 else if (code1 == NE_EXPR)
4313 bool val;
4314 switch (code2)
4316 case EQ_EXPR: val = (cmp != 0); break;
4317 case NE_EXPR: val = (cmp == 0); break;
4318 case LT_EXPR: val = (cmp >= 0); break;
4319 case GT_EXPR: val = (cmp <= 0); break;
4320 case LE_EXPR: val = (cmp > 0); break;
4321 case GE_EXPR: val = (cmp < 0); break;
4322 default:
4323 val = false;
4325 if (val)
4326 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4328 else if (code2 == NE_EXPR)
4330 bool val;
4331 switch (code1)
4333 case EQ_EXPR: val = (cmp == 0); break;
4334 case NE_EXPR: val = (cmp != 0); break;
4335 case LT_EXPR: val = (cmp <= 0); break;
4336 case GT_EXPR: val = (cmp >= 0); break;
4337 case LE_EXPR: val = (cmp < 0); break;
4338 case GE_EXPR: val = (cmp > 0); break;
4339 default:
4340 val = false;
4342 if (val)
4343 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4346 /* Chose the more restrictive of two < or <= comparisons. */
4347 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4348 && (code2 == LT_EXPR || code2 == LE_EXPR))
4350 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4351 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4352 else
4353 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4356 /* Likewise chose the more restrictive of two > or >= comparisons. */
4357 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4358 && (code2 == GT_EXPR || code2 == GE_EXPR))
4360 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4361 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4362 else
4363 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4366 /* Check for singleton ranges. */
4367 else if (cmp == 0
4368 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4369 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4370 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4372 /* Check for disjoint ranges. */
4373 else if (cmp <= 0
4374 && (code1 == LT_EXPR || code1 == LE_EXPR)
4375 && (code2 == GT_EXPR || code2 == GE_EXPR))
4376 return boolean_false_node;
4377 else if (cmp >= 0
4378 && (code1 == GT_EXPR || code1 == GE_EXPR)
4379 && (code2 == LT_EXPR || code2 == LE_EXPR))
4380 return boolean_false_node;
4383 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4384 NAME's definition is a truth value. See if there are any simplifications
4385 that can be done against the NAME's definition. */
4386 if (TREE_CODE (op1a) == SSA_NAME
4387 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4388 && (integer_zerop (op1b) || integer_onep (op1b)))
4390 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4391 || (code1 == NE_EXPR && integer_onep (op1b)));
4392 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4393 switch (gimple_code (stmt))
4395 case GIMPLE_ASSIGN:
4396 /* Try to simplify by copy-propagating the definition. */
4397 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4399 case GIMPLE_PHI:
4400 /* If every argument to the PHI produces the same result when
4401 ANDed with the second comparison, we win.
4402 Do not do this unless the type is bool since we need a bool
4403 result here anyway. */
4404 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4406 tree result = NULL_TREE;
4407 unsigned i;
4408 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4410 tree arg = gimple_phi_arg_def (stmt, i);
4412 /* If this PHI has itself as an argument, ignore it.
4413 If all the other args produce the same result,
4414 we're still OK. */
4415 if (arg == gimple_phi_result (stmt))
4416 continue;
4417 else if (TREE_CODE (arg) == INTEGER_CST)
4419 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4421 if (!result)
4422 result = boolean_false_node;
4423 else if (!integer_zerop (result))
4424 return NULL_TREE;
4426 else if (!result)
4427 result = fold_build2 (code2, boolean_type_node,
4428 op2a, op2b);
4429 else if (!same_bool_comparison_p (result,
4430 code2, op2a, op2b))
4431 return NULL_TREE;
4433 else if (TREE_CODE (arg) == SSA_NAME
4434 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4436 tree temp;
4437 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4438 /* In simple cases we can look through PHI nodes,
4439 but we have to be careful with loops.
4440 See PR49073. */
4441 if (! dom_info_available_p (CDI_DOMINATORS)
4442 || gimple_bb (def_stmt) == gimple_bb (stmt)
4443 || dominated_by_p (CDI_DOMINATORS,
4444 gimple_bb (def_stmt),
4445 gimple_bb (stmt)))
4446 return NULL_TREE;
4447 temp = and_var_with_comparison (arg, invert, code2,
4448 op2a, op2b);
4449 if (!temp)
4450 return NULL_TREE;
4451 else if (!result)
4452 result = temp;
4453 else if (!same_bool_result_p (result, temp))
4454 return NULL_TREE;
4456 else
4457 return NULL_TREE;
4459 return result;
4462 default:
4463 break;
4466 return NULL_TREE;
4469 /* Try to simplify the AND of two comparisons, specified by
4470 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4471 If this can be simplified to a single expression (without requiring
4472 introducing more SSA variables to hold intermediate values),
4473 return the resulting tree. Otherwise return NULL_TREE.
4474 If the result expression is non-null, it has boolean type. */
4476 tree
4477 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4478 enum tree_code code2, tree op2a, tree op2b)
4480 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4481 if (t)
4482 return t;
4483 else
4484 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4487 /* Helper function for or_comparisons_1: try to simplify the OR of the
4488 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4489 If INVERT is true, invert the value of VAR before doing the OR.
4490 Return NULL_EXPR if we can't simplify this to a single expression. */
4492 static tree
4493 or_var_with_comparison (tree var, bool invert,
4494 enum tree_code code2, tree op2a, tree op2b)
4496 tree t;
4497 gimple *stmt = SSA_NAME_DEF_STMT (var);
4499 /* We can only deal with variables whose definitions are assignments. */
4500 if (!is_gimple_assign (stmt))
4501 return NULL_TREE;
4503 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4504 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4505 Then we only have to consider the simpler non-inverted cases. */
4506 if (invert)
4507 t = and_var_with_comparison_1 (stmt,
4508 invert_tree_comparison (code2, false),
4509 op2a, op2b);
4510 else
4511 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4512 return canonicalize_bool (t, invert);
4515 /* Try to simplify the OR of the ssa variable defined by the assignment
4516 STMT with the comparison specified by (OP2A CODE2 OP2B).
4517 Return NULL_EXPR if we can't simplify this to a single expression. */
4519 static tree
4520 or_var_with_comparison_1 (gimple *stmt,
4521 enum tree_code code2, tree op2a, tree op2b)
4523 tree var = gimple_assign_lhs (stmt);
4524 tree true_test_var = NULL_TREE;
4525 tree false_test_var = NULL_TREE;
4526 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4528 /* Check for identities like (var OR (var != 0)) => true . */
4529 if (TREE_CODE (op2a) == SSA_NAME
4530 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4532 if ((code2 == NE_EXPR && integer_zerop (op2b))
4533 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4535 true_test_var = op2a;
4536 if (var == true_test_var)
4537 return var;
4539 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4540 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4542 false_test_var = op2a;
4543 if (var == false_test_var)
4544 return boolean_true_node;
4548 /* If the definition is a comparison, recurse on it. */
4549 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4551 tree t = or_comparisons_1 (innercode,
4552 gimple_assign_rhs1 (stmt),
4553 gimple_assign_rhs2 (stmt),
4554 code2,
4555 op2a,
4556 op2b);
4557 if (t)
4558 return t;
4561 /* If the definition is an AND or OR expression, we may be able to
4562 simplify by reassociating. */
4563 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4564 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4566 tree inner1 = gimple_assign_rhs1 (stmt);
4567 tree inner2 = gimple_assign_rhs2 (stmt);
4568 gimple *s;
4569 tree t;
4570 tree partial = NULL_TREE;
4571 bool is_or = (innercode == BIT_IOR_EXPR);
4573 /* Check for boolean identities that don't require recursive examination
4574 of inner1/inner2:
4575 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4576 inner1 OR (inner1 AND inner2) => inner1
4577 !inner1 OR (inner1 OR inner2) => true
4578 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4580 if (inner1 == true_test_var)
4581 return (is_or ? var : inner1);
4582 else if (inner2 == true_test_var)
4583 return (is_or ? var : inner2);
4584 else if (inner1 == false_test_var)
4585 return (is_or
4586 ? boolean_true_node
4587 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4588 else if (inner2 == false_test_var)
4589 return (is_or
4590 ? boolean_true_node
4591 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4593 /* Next, redistribute/reassociate the OR across the inner tests.
4594 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4595 if (TREE_CODE (inner1) == SSA_NAME
4596 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4597 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4598 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4599 gimple_assign_rhs1 (s),
4600 gimple_assign_rhs2 (s),
4601 code2, op2a, op2b)))
4603 /* Handle the OR case, where we are reassociating:
4604 (inner1 OR inner2) OR (op2a code2 op2b)
4605 => (t OR inner2)
4606 If the partial result t is a constant, we win. Otherwise
4607 continue on to try reassociating with the other inner test. */
4608 if (is_or)
4610 if (integer_onep (t))
4611 return boolean_true_node;
4612 else if (integer_zerop (t))
4613 return inner2;
4616 /* Handle the AND case, where we are redistributing:
4617 (inner1 AND inner2) OR (op2a code2 op2b)
4618 => (t AND (inner2 OR (op2a code op2b))) */
4619 else if (integer_zerop (t))
4620 return boolean_false_node;
4622 /* Save partial result for later. */
4623 partial = t;
4626 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4627 if (TREE_CODE (inner2) == SSA_NAME
4628 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4629 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4630 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4631 gimple_assign_rhs1 (s),
4632 gimple_assign_rhs2 (s),
4633 code2, op2a, op2b)))
4635 /* Handle the OR case, where we are reassociating:
4636 (inner1 OR inner2) OR (op2a code2 op2b)
4637 => (inner1 OR t)
4638 => (t OR partial) */
4639 if (is_or)
4641 if (integer_zerop (t))
4642 return inner1;
4643 else if (integer_onep (t))
4644 return boolean_true_node;
4645 /* If both are the same, we can apply the identity
4646 (x OR x) == x. */
4647 else if (partial && same_bool_result_p (t, partial))
4648 return t;
4651 /* Handle the AND case, where we are redistributing:
4652 (inner1 AND inner2) OR (op2a code2 op2b)
4653 => (t AND (inner1 OR (op2a code2 op2b)))
4654 => (t AND partial) */
4655 else
4657 if (integer_zerop (t))
4658 return boolean_false_node;
4659 else if (partial)
4661 /* We already got a simplification for the other
4662 operand to the redistributed AND expression. The
4663 interesting case is when at least one is true.
4664 Or, if both are the same, we can apply the identity
4665 (x AND x) == x. */
4666 if (integer_onep (partial))
4667 return t;
4668 else if (integer_onep (t))
4669 return partial;
4670 else if (same_bool_result_p (t, partial))
4671 return t;
4676 return NULL_TREE;
4679 /* Try to simplify the OR of two comparisons defined by
4680 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4681 If this can be done without constructing an intermediate value,
4682 return the resulting tree; otherwise NULL_TREE is returned.
4683 This function is deliberately asymmetric as it recurses on SSA_DEFs
4684 in the first comparison but not the second. */
4686 static tree
4687 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4688 enum tree_code code2, tree op2a, tree op2b)
4690 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4692 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4693 if (operand_equal_p (op1a, op2a, 0)
4694 && operand_equal_p (op1b, op2b, 0))
4696 /* Result will be either NULL_TREE, or a combined comparison. */
4697 tree t = combine_comparisons (UNKNOWN_LOCATION,
4698 TRUTH_ORIF_EXPR, code1, code2,
4699 truth_type, op1a, op1b);
4700 if (t)
4701 return t;
4704 /* Likewise the swapped case of the above. */
4705 if (operand_equal_p (op1a, op2b, 0)
4706 && operand_equal_p (op1b, op2a, 0))
4708 /* Result will be either NULL_TREE, or a combined comparison. */
4709 tree t = combine_comparisons (UNKNOWN_LOCATION,
4710 TRUTH_ORIF_EXPR, code1,
4711 swap_tree_comparison (code2),
4712 truth_type, op1a, op1b);
4713 if (t)
4714 return t;
4717 /* If both comparisons are of the same value against constants, we might
4718 be able to merge them. */
4719 if (operand_equal_p (op1a, op2a, 0)
4720 && TREE_CODE (op1b) == INTEGER_CST
4721 && TREE_CODE (op2b) == INTEGER_CST)
4723 int cmp = tree_int_cst_compare (op1b, op2b);
4725 /* If we have (op1a != op1b), we should either be able to
4726 return that or TRUE, depending on whether the constant op1b
4727 also satisfies the other comparison against op2b. */
4728 if (code1 == NE_EXPR)
4730 bool done = true;
4731 bool val;
4732 switch (code2)
4734 case EQ_EXPR: val = (cmp == 0); break;
4735 case NE_EXPR: val = (cmp != 0); break;
4736 case LT_EXPR: val = (cmp < 0); break;
4737 case GT_EXPR: val = (cmp > 0); break;
4738 case LE_EXPR: val = (cmp <= 0); break;
4739 case GE_EXPR: val = (cmp >= 0); break;
4740 default: done = false;
4742 if (done)
4744 if (val)
4745 return boolean_true_node;
4746 else
4747 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4750 /* Likewise if the second comparison is a != comparison. */
4751 else if (code2 == NE_EXPR)
4753 bool done = true;
4754 bool val;
4755 switch (code1)
4757 case EQ_EXPR: val = (cmp == 0); break;
4758 case NE_EXPR: val = (cmp != 0); break;
4759 case LT_EXPR: val = (cmp > 0); break;
4760 case GT_EXPR: val = (cmp < 0); break;
4761 case LE_EXPR: val = (cmp >= 0); break;
4762 case GE_EXPR: val = (cmp <= 0); break;
4763 default: done = false;
4765 if (done)
4767 if (val)
4768 return boolean_true_node;
4769 else
4770 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4774 /* See if an equality test is redundant with the other comparison. */
4775 else if (code1 == EQ_EXPR)
4777 bool val;
4778 switch (code2)
4780 case EQ_EXPR: val = (cmp == 0); break;
4781 case NE_EXPR: val = (cmp != 0); break;
4782 case LT_EXPR: val = (cmp < 0); break;
4783 case GT_EXPR: val = (cmp > 0); break;
4784 case LE_EXPR: val = (cmp <= 0); break;
4785 case GE_EXPR: val = (cmp >= 0); break;
4786 default:
4787 val = false;
4789 if (val)
4790 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4792 else if (code2 == EQ_EXPR)
4794 bool val;
4795 switch (code1)
4797 case EQ_EXPR: val = (cmp == 0); break;
4798 case NE_EXPR: val = (cmp != 0); break;
4799 case LT_EXPR: val = (cmp > 0); break;
4800 case GT_EXPR: val = (cmp < 0); break;
4801 case LE_EXPR: val = (cmp >= 0); break;
4802 case GE_EXPR: val = (cmp <= 0); break;
4803 default:
4804 val = false;
4806 if (val)
4807 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4810 /* Chose the less restrictive of two < or <= comparisons. */
4811 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4812 && (code2 == LT_EXPR || code2 == LE_EXPR))
4814 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4815 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4816 else
4817 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4820 /* Likewise chose the less restrictive of two > or >= comparisons. */
4821 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4822 && (code2 == GT_EXPR || code2 == GE_EXPR))
4824 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4825 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4826 else
4827 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4830 /* Check for singleton ranges. */
4831 else if (cmp == 0
4832 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4833 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4834 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4836 /* Check for less/greater pairs that don't restrict the range at all. */
4837 else if (cmp >= 0
4838 && (code1 == LT_EXPR || code1 == LE_EXPR)
4839 && (code2 == GT_EXPR || code2 == GE_EXPR))
4840 return boolean_true_node;
4841 else if (cmp <= 0
4842 && (code1 == GT_EXPR || code1 == GE_EXPR)
4843 && (code2 == LT_EXPR || code2 == LE_EXPR))
4844 return boolean_true_node;
4847 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4848 NAME's definition is a truth value. See if there are any simplifications
4849 that can be done against the NAME's definition. */
4850 if (TREE_CODE (op1a) == SSA_NAME
4851 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4852 && (integer_zerop (op1b) || integer_onep (op1b)))
4854 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4855 || (code1 == NE_EXPR && integer_onep (op1b)));
4856 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4857 switch (gimple_code (stmt))
4859 case GIMPLE_ASSIGN:
4860 /* Try to simplify by copy-propagating the definition. */
4861 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4863 case GIMPLE_PHI:
4864 /* If every argument to the PHI produces the same result when
4865 ORed with the second comparison, we win.
4866 Do not do this unless the type is bool since we need a bool
4867 result here anyway. */
4868 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4870 tree result = NULL_TREE;
4871 unsigned i;
4872 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4874 tree arg = gimple_phi_arg_def (stmt, i);
4876 /* If this PHI has itself as an argument, ignore it.
4877 If all the other args produce the same result,
4878 we're still OK. */
4879 if (arg == gimple_phi_result (stmt))
4880 continue;
4881 else if (TREE_CODE (arg) == INTEGER_CST)
4883 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4885 if (!result)
4886 result = boolean_true_node;
4887 else if (!integer_onep (result))
4888 return NULL_TREE;
4890 else if (!result)
4891 result = fold_build2 (code2, boolean_type_node,
4892 op2a, op2b);
4893 else if (!same_bool_comparison_p (result,
4894 code2, op2a, op2b))
4895 return NULL_TREE;
4897 else if (TREE_CODE (arg) == SSA_NAME
4898 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4900 tree temp;
4901 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4902 /* In simple cases we can look through PHI nodes,
4903 but we have to be careful with loops.
4904 See PR49073. */
4905 if (! dom_info_available_p (CDI_DOMINATORS)
4906 || gimple_bb (def_stmt) == gimple_bb (stmt)
4907 || dominated_by_p (CDI_DOMINATORS,
4908 gimple_bb (def_stmt),
4909 gimple_bb (stmt)))
4910 return NULL_TREE;
4911 temp = or_var_with_comparison (arg, invert, code2,
4912 op2a, op2b);
4913 if (!temp)
4914 return NULL_TREE;
4915 else if (!result)
4916 result = temp;
4917 else if (!same_bool_result_p (result, temp))
4918 return NULL_TREE;
4920 else
4921 return NULL_TREE;
4923 return result;
4926 default:
4927 break;
4930 return NULL_TREE;
4933 /* Try to simplify the OR of two comparisons, specified by
4934 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4935 If this can be simplified to a single expression (without requiring
4936 introducing more SSA variables to hold intermediate values),
4937 return the resulting tree. Otherwise return NULL_TREE.
4938 If the result expression is non-null, it has boolean type. */
4940 tree
4941 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4942 enum tree_code code2, tree op2a, tree op2b)
4944 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4945 if (t)
4946 return t;
4947 else
4948 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4952 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4954 Either NULL_TREE, a simplified but non-constant or a constant
4955 is returned.
4957 ??? This should go into a gimple-fold-inline.h file to be eventually
4958 privatized with the single valueize function used in the various TUs
4959 to avoid the indirect function call overhead. */
4961 tree
4962 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
4963 tree (*gvalueize) (tree))
4965 code_helper rcode;
4966 tree ops[3] = {};
4967 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4968 edges if there are intermediate VARYING defs. For this reason
4969 do not follow SSA edges here even though SCCVN can technically
4970 just deal fine with that. */
4971 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
4973 tree res = NULL_TREE;
4974 if (gimple_simplified_result_is_gimple_val (rcode, ops))
4975 res = ops[0];
4976 else if (mprts_hook)
4977 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
4978 if (res)
4980 if (dump_file && dump_flags & TDF_DETAILS)
4982 fprintf (dump_file, "Match-and-simplified ");
4983 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4984 fprintf (dump_file, " to ");
4985 print_generic_expr (dump_file, res, 0);
4986 fprintf (dump_file, "\n");
4988 return res;
4992 location_t loc = gimple_location (stmt);
4993 switch (gimple_code (stmt))
4995 case GIMPLE_ASSIGN:
4997 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4999 switch (get_gimple_rhs_class (subcode))
5001 case GIMPLE_SINGLE_RHS:
5003 tree rhs = gimple_assign_rhs1 (stmt);
5004 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5006 if (TREE_CODE (rhs) == SSA_NAME)
5008 /* If the RHS is an SSA_NAME, return its known constant value,
5009 if any. */
5010 return (*valueize) (rhs);
5012 /* Handle propagating invariant addresses into address
5013 operations. */
5014 else if (TREE_CODE (rhs) == ADDR_EXPR
5015 && !is_gimple_min_invariant (rhs))
5017 HOST_WIDE_INT offset = 0;
5018 tree base;
5019 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5020 &offset,
5021 valueize);
5022 if (base
5023 && (CONSTANT_CLASS_P (base)
5024 || decl_address_invariant_p (base)))
5025 return build_invariant_address (TREE_TYPE (rhs),
5026 base, offset);
5028 else if (TREE_CODE (rhs) == CONSTRUCTOR
5029 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5030 && (CONSTRUCTOR_NELTS (rhs)
5031 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5033 unsigned i;
5034 tree val, *vec;
5036 vec = XALLOCAVEC (tree,
5037 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5038 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5040 val = (*valueize) (val);
5041 if (TREE_CODE (val) == INTEGER_CST
5042 || TREE_CODE (val) == REAL_CST
5043 || TREE_CODE (val) == FIXED_CST)
5044 vec[i] = val;
5045 else
5046 return NULL_TREE;
5049 return build_vector (TREE_TYPE (rhs), vec);
5051 if (subcode == OBJ_TYPE_REF)
5053 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5054 /* If callee is constant, we can fold away the wrapper. */
5055 if (is_gimple_min_invariant (val))
5056 return val;
5059 if (kind == tcc_reference)
5061 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5062 || TREE_CODE (rhs) == REALPART_EXPR
5063 || TREE_CODE (rhs) == IMAGPART_EXPR)
5064 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5066 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5067 return fold_unary_loc (EXPR_LOCATION (rhs),
5068 TREE_CODE (rhs),
5069 TREE_TYPE (rhs), val);
5071 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5072 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5074 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5075 return fold_ternary_loc (EXPR_LOCATION (rhs),
5076 TREE_CODE (rhs),
5077 TREE_TYPE (rhs), val,
5078 TREE_OPERAND (rhs, 1),
5079 TREE_OPERAND (rhs, 2));
5081 else if (TREE_CODE (rhs) == MEM_REF
5082 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5084 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5085 if (TREE_CODE (val) == ADDR_EXPR
5086 && is_gimple_min_invariant (val))
5088 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5089 unshare_expr (val),
5090 TREE_OPERAND (rhs, 1));
5091 if (tem)
5092 rhs = tem;
5095 return fold_const_aggregate_ref_1 (rhs, valueize);
5097 else if (kind == tcc_declaration)
5098 return get_symbol_constant_value (rhs);
5099 return rhs;
5102 case GIMPLE_UNARY_RHS:
5103 return NULL_TREE;
5105 case GIMPLE_BINARY_RHS:
5106 /* Translate &x + CST into an invariant form suitable for
5107 further propagation. */
5108 if (subcode == POINTER_PLUS_EXPR)
5110 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5111 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5112 if (TREE_CODE (op0) == ADDR_EXPR
5113 && TREE_CODE (op1) == INTEGER_CST)
5115 tree off = fold_convert (ptr_type_node, op1);
5116 return build_fold_addr_expr_loc
5117 (loc,
5118 fold_build2 (MEM_REF,
5119 TREE_TYPE (TREE_TYPE (op0)),
5120 unshare_expr (op0), off));
5123 /* Canonicalize bool != 0 and bool == 0 appearing after
5124 valueization. While gimple_simplify handles this
5125 it can get confused by the ~X == 1 -> X == 0 transform
5126 which we cant reduce to a SSA name or a constant
5127 (and we have no way to tell gimple_simplify to not
5128 consider those transforms in the first place). */
5129 else if (subcode == EQ_EXPR
5130 || subcode == NE_EXPR)
5132 tree lhs = gimple_assign_lhs (stmt);
5133 tree op0 = gimple_assign_rhs1 (stmt);
5134 if (useless_type_conversion_p (TREE_TYPE (lhs),
5135 TREE_TYPE (op0)))
5137 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5138 op0 = (*valueize) (op0);
5139 if (TREE_CODE (op0) == INTEGER_CST)
5140 std::swap (op0, op1);
5141 if (TREE_CODE (op1) == INTEGER_CST
5142 && ((subcode == NE_EXPR && integer_zerop (op1))
5143 || (subcode == EQ_EXPR && integer_onep (op1))))
5144 return op0;
5147 return NULL_TREE;
5149 case GIMPLE_TERNARY_RHS:
5151 /* Handle ternary operators that can appear in GIMPLE form. */
5152 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5153 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5154 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5155 return fold_ternary_loc (loc, subcode,
5156 gimple_expr_type (stmt), op0, op1, op2);
5159 default:
5160 gcc_unreachable ();
5164 case GIMPLE_CALL:
5166 tree fn;
5167 gcall *call_stmt = as_a <gcall *> (stmt);
5169 if (gimple_call_internal_p (stmt))
5171 enum tree_code subcode = ERROR_MARK;
5172 switch (gimple_call_internal_fn (stmt))
5174 case IFN_UBSAN_CHECK_ADD:
5175 subcode = PLUS_EXPR;
5176 break;
5177 case IFN_UBSAN_CHECK_SUB:
5178 subcode = MINUS_EXPR;
5179 break;
5180 case IFN_UBSAN_CHECK_MUL:
5181 subcode = MULT_EXPR;
5182 break;
5183 default:
5184 return NULL_TREE;
5186 tree arg0 = gimple_call_arg (stmt, 0);
5187 tree arg1 = gimple_call_arg (stmt, 1);
5188 tree op0 = (*valueize) (arg0);
5189 tree op1 = (*valueize) (arg1);
5191 if (TREE_CODE (op0) != INTEGER_CST
5192 || TREE_CODE (op1) != INTEGER_CST)
5194 switch (subcode)
5196 case MULT_EXPR:
5197 /* x * 0 = 0 * x = 0 without overflow. */
5198 if (integer_zerop (op0) || integer_zerop (op1))
5199 return build_zero_cst (TREE_TYPE (arg0));
5200 break;
5201 case MINUS_EXPR:
5202 /* y - y = 0 without overflow. */
5203 if (operand_equal_p (op0, op1, 0))
5204 return build_zero_cst (TREE_TYPE (arg0));
5205 break;
5206 default:
5207 break;
5210 tree res
5211 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5212 if (res
5213 && TREE_CODE (res) == INTEGER_CST
5214 && !TREE_OVERFLOW (res))
5215 return res;
5216 return NULL_TREE;
5219 fn = (*valueize) (gimple_call_fn (stmt));
5220 if (TREE_CODE (fn) == ADDR_EXPR
5221 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5222 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5223 && gimple_builtin_call_types_compatible_p (stmt,
5224 TREE_OPERAND (fn, 0)))
5226 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5227 tree retval;
5228 unsigned i;
5229 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5230 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5231 retval = fold_builtin_call_array (loc,
5232 gimple_call_return_type (call_stmt),
5233 fn, gimple_call_num_args (stmt), args);
5234 if (retval)
5236 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5237 STRIP_NOPS (retval);
5238 retval = fold_convert (gimple_call_return_type (call_stmt),
5239 retval);
5241 return retval;
5243 return NULL_TREE;
5246 default:
5247 return NULL_TREE;
5251 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5252 Returns NULL_TREE if folding to a constant is not possible, otherwise
5253 returns a constant according to is_gimple_min_invariant. */
5255 tree
5256 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5258 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5259 if (res && is_gimple_min_invariant (res))
5260 return res;
5261 return NULL_TREE;
5265 /* The following set of functions are supposed to fold references using
5266 their constant initializers. */
5268 /* See if we can find constructor defining value of BASE.
5269 When we know the consructor with constant offset (such as
5270 base is array[40] and we do know constructor of array), then
5271 BIT_OFFSET is adjusted accordingly.
5273 As a special case, return error_mark_node when constructor
5274 is not explicitly available, but it is known to be zero
5275 such as 'static const int a;'. */
5276 static tree
5277 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5278 tree (*valueize)(tree))
5280 HOST_WIDE_INT bit_offset2, size, max_size;
5281 bool reverse;
5283 if (TREE_CODE (base) == MEM_REF)
5285 if (!integer_zerop (TREE_OPERAND (base, 1)))
5287 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5288 return NULL_TREE;
5289 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5290 * BITS_PER_UNIT);
5293 if (valueize
5294 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5295 base = valueize (TREE_OPERAND (base, 0));
5296 if (!base || TREE_CODE (base) != ADDR_EXPR)
5297 return NULL_TREE;
5298 base = TREE_OPERAND (base, 0);
5301 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5302 DECL_INITIAL. If BASE is a nested reference into another
5303 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5304 the inner reference. */
5305 switch (TREE_CODE (base))
5307 case VAR_DECL:
5308 case CONST_DECL:
5310 tree init = ctor_for_folding (base);
5312 /* Our semantic is exact opposite of ctor_for_folding;
5313 NULL means unknown, while error_mark_node is 0. */
5314 if (init == error_mark_node)
5315 return NULL_TREE;
5316 if (!init)
5317 return error_mark_node;
5318 return init;
5321 case ARRAY_REF:
5322 case COMPONENT_REF:
5323 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
5324 &reverse);
5325 if (max_size == -1 || size != max_size)
5326 return NULL_TREE;
5327 *bit_offset += bit_offset2;
5328 return get_base_constructor (base, bit_offset, valueize);
5330 case STRING_CST:
5331 case CONSTRUCTOR:
5332 return base;
5334 default:
5335 return NULL_TREE;
5339 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5340 SIZE to the memory at bit OFFSET. */
5342 static tree
5343 fold_array_ctor_reference (tree type, tree ctor,
5344 unsigned HOST_WIDE_INT offset,
5345 unsigned HOST_WIDE_INT size,
5346 tree from_decl)
5348 offset_int low_bound;
5349 offset_int elt_size;
5350 offset_int access_index;
5351 tree domain_type = NULL_TREE;
5352 HOST_WIDE_INT inner_offset;
5354 /* Compute low bound and elt size. */
5355 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5356 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5357 if (domain_type && TYPE_MIN_VALUE (domain_type))
5359 /* Static constructors for variably sized objects makes no sense. */
5360 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
5361 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5363 else
5364 low_bound = 0;
5365 /* Static constructors for variably sized objects makes no sense. */
5366 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
5367 == INTEGER_CST);
5368 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5370 /* We can handle only constantly sized accesses that are known to not
5371 be larger than size of array element. */
5372 if (!TYPE_SIZE_UNIT (type)
5373 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5374 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
5375 || elt_size == 0)
5376 return NULL_TREE;
5378 /* Compute the array index we look for. */
5379 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5380 elt_size);
5381 access_index += low_bound;
5383 /* And offset within the access. */
5384 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5386 /* See if the array field is large enough to span whole access. We do not
5387 care to fold accesses spanning multiple array indexes. */
5388 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5389 return NULL_TREE;
5390 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
5391 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
5393 /* When memory is not explicitely mentioned in constructor,
5394 it is 0 (or out of range). */
5395 return build_zero_cst (type);
5398 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5399 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5401 static tree
5402 fold_nonarray_ctor_reference (tree type, tree ctor,
5403 unsigned HOST_WIDE_INT offset,
5404 unsigned HOST_WIDE_INT size,
5405 tree from_decl)
5407 unsigned HOST_WIDE_INT cnt;
5408 tree cfield, cval;
5410 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5411 cval)
5413 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5414 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5415 tree field_size = DECL_SIZE (cfield);
5416 offset_int bitoffset;
5417 offset_int bitoffset_end, access_end;
5419 /* Variable sized objects in static constructors makes no sense,
5420 but field_size can be NULL for flexible array members. */
5421 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5422 && TREE_CODE (byte_offset) == INTEGER_CST
5423 && (field_size != NULL_TREE
5424 ? TREE_CODE (field_size) == INTEGER_CST
5425 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5427 /* Compute bit offset of the field. */
5428 bitoffset = (wi::to_offset (field_offset)
5429 + wi::lshift (wi::to_offset (byte_offset),
5430 LOG2_BITS_PER_UNIT));
5431 /* Compute bit offset where the field ends. */
5432 if (field_size != NULL_TREE)
5433 bitoffset_end = bitoffset + wi::to_offset (field_size);
5434 else
5435 bitoffset_end = 0;
5437 access_end = offset_int (offset) + size;
5439 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5440 [BITOFFSET, BITOFFSET_END)? */
5441 if (wi::cmps (access_end, bitoffset) > 0
5442 && (field_size == NULL_TREE
5443 || wi::lts_p (offset, bitoffset_end)))
5445 offset_int inner_offset = offset_int (offset) - bitoffset;
5446 /* We do have overlap. Now see if field is large enough to
5447 cover the access. Give up for accesses spanning multiple
5448 fields. */
5449 if (wi::cmps (access_end, bitoffset_end) > 0)
5450 return NULL_TREE;
5451 if (wi::lts_p (offset, bitoffset))
5452 return NULL_TREE;
5453 return fold_ctor_reference (type, cval,
5454 inner_offset.to_uhwi (), size,
5455 from_decl);
5458 /* When memory is not explicitely mentioned in constructor, it is 0. */
5459 return build_zero_cst (type);
5462 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5463 to the memory at bit OFFSET. */
5465 tree
5466 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5467 unsigned HOST_WIDE_INT size, tree from_decl)
5469 tree ret;
5471 /* We found the field with exact match. */
5472 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5473 && !offset)
5474 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5476 /* We are at the end of walk, see if we can view convert the
5477 result. */
5478 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5479 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5480 && !compare_tree_int (TYPE_SIZE (type), size)
5481 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5483 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5484 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5485 if (ret)
5486 STRIP_USELESS_TYPE_CONVERSION (ret);
5487 return ret;
5489 /* For constants and byte-aligned/sized reads try to go through
5490 native_encode/interpret. */
5491 if (CONSTANT_CLASS_P (ctor)
5492 && BITS_PER_UNIT == 8
5493 && offset % BITS_PER_UNIT == 0
5494 && size % BITS_PER_UNIT == 0
5495 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5497 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5498 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5499 offset / BITS_PER_UNIT) > 0)
5500 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
5502 if (TREE_CODE (ctor) == CONSTRUCTOR)
5505 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5506 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5507 return fold_array_ctor_reference (type, ctor, offset, size,
5508 from_decl);
5509 else
5510 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5511 from_decl);
5514 return NULL_TREE;
5517 /* Return the tree representing the element referenced by T if T is an
5518 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5519 names using VALUEIZE. Return NULL_TREE otherwise. */
5521 tree
5522 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5524 tree ctor, idx, base;
5525 HOST_WIDE_INT offset, size, max_size;
5526 tree tem;
5527 bool reverse;
5529 if (TREE_THIS_VOLATILE (t))
5530 return NULL_TREE;
5532 if (DECL_P (t))
5533 return get_symbol_constant_value (t);
5535 tem = fold_read_from_constant_string (t);
5536 if (tem)
5537 return tem;
5539 switch (TREE_CODE (t))
5541 case ARRAY_REF:
5542 case ARRAY_RANGE_REF:
5543 /* Constant indexes are handled well by get_base_constructor.
5544 Only special case variable offsets.
5545 FIXME: This code can't handle nested references with variable indexes
5546 (they will be handled only by iteration of ccp). Perhaps we can bring
5547 get_ref_base_and_extent here and make it use a valueize callback. */
5548 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5549 && valueize
5550 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5551 && TREE_CODE (idx) == INTEGER_CST)
5553 tree low_bound, unit_size;
5555 /* If the resulting bit-offset is constant, track it. */
5556 if ((low_bound = array_ref_low_bound (t),
5557 TREE_CODE (low_bound) == INTEGER_CST)
5558 && (unit_size = array_ref_element_size (t),
5559 tree_fits_uhwi_p (unit_size)))
5561 offset_int woffset
5562 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5563 TYPE_PRECISION (TREE_TYPE (idx)));
5565 if (wi::fits_shwi_p (woffset))
5567 offset = woffset.to_shwi ();
5568 /* TODO: This code seems wrong, multiply then check
5569 to see if it fits. */
5570 offset *= tree_to_uhwi (unit_size);
5571 offset *= BITS_PER_UNIT;
5573 base = TREE_OPERAND (t, 0);
5574 ctor = get_base_constructor (base, &offset, valueize);
5575 /* Empty constructor. Always fold to 0. */
5576 if (ctor == error_mark_node)
5577 return build_zero_cst (TREE_TYPE (t));
5578 /* Out of bound array access. Value is undefined,
5579 but don't fold. */
5580 if (offset < 0)
5581 return NULL_TREE;
5582 /* We can not determine ctor. */
5583 if (!ctor)
5584 return NULL_TREE;
5585 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5586 tree_to_uhwi (unit_size)
5587 * BITS_PER_UNIT,
5588 base);
5592 /* Fallthru. */
5594 case COMPONENT_REF:
5595 case BIT_FIELD_REF:
5596 case TARGET_MEM_REF:
5597 case MEM_REF:
5598 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
5599 ctor = get_base_constructor (base, &offset, valueize);
5601 /* Empty constructor. Always fold to 0. */
5602 if (ctor == error_mark_node)
5603 return build_zero_cst (TREE_TYPE (t));
5604 /* We do not know precise address. */
5605 if (max_size == -1 || max_size != size)
5606 return NULL_TREE;
5607 /* We can not determine ctor. */
5608 if (!ctor)
5609 return NULL_TREE;
5611 /* Out of bound array access. Value is undefined, but don't fold. */
5612 if (offset < 0)
5613 return NULL_TREE;
5615 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5616 base);
5618 case REALPART_EXPR:
5619 case IMAGPART_EXPR:
5621 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5622 if (c && TREE_CODE (c) == COMPLEX_CST)
5623 return fold_build1_loc (EXPR_LOCATION (t),
5624 TREE_CODE (t), TREE_TYPE (t), c);
5625 break;
5628 default:
5629 break;
5632 return NULL_TREE;
5635 tree
5636 fold_const_aggregate_ref (tree t)
5638 return fold_const_aggregate_ref_1 (t, NULL);
5641 /* Lookup virtual method with index TOKEN in a virtual table V
5642 at OFFSET.
5643 Set CAN_REFER if non-NULL to false if method
5644 is not referable or if the virtual table is ill-formed (such as rewriten
5645 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5647 tree
5648 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5649 tree v,
5650 unsigned HOST_WIDE_INT offset,
5651 bool *can_refer)
5653 tree vtable = v, init, fn;
5654 unsigned HOST_WIDE_INT size;
5655 unsigned HOST_WIDE_INT elt_size, access_index;
5656 tree domain_type;
5658 if (can_refer)
5659 *can_refer = true;
5661 /* First of all double check we have virtual table. */
5662 if (TREE_CODE (v) != VAR_DECL
5663 || !DECL_VIRTUAL_P (v))
5665 /* Pass down that we lost track of the target. */
5666 if (can_refer)
5667 *can_refer = false;
5668 return NULL_TREE;
5671 init = ctor_for_folding (v);
5673 /* The virtual tables should always be born with constructors
5674 and we always should assume that they are avaialble for
5675 folding. At the moment we do not stream them in all cases,
5676 but it should never happen that ctor seem unreachable. */
5677 gcc_assert (init);
5678 if (init == error_mark_node)
5680 gcc_assert (in_lto_p);
5681 /* Pass down that we lost track of the target. */
5682 if (can_refer)
5683 *can_refer = false;
5684 return NULL_TREE;
5686 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5687 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5688 offset *= BITS_PER_UNIT;
5689 offset += token * size;
5691 /* Lookup the value in the constructor that is assumed to be array.
5692 This is equivalent to
5693 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5694 offset, size, NULL);
5695 but in a constant time. We expect that frontend produced a simple
5696 array without indexed initializers. */
5698 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5699 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5700 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5701 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5703 access_index = offset / BITS_PER_UNIT / elt_size;
5704 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5706 /* This code makes an assumption that there are no
5707 indexed fileds produced by C++ FE, so we can directly index the array. */
5708 if (access_index < CONSTRUCTOR_NELTS (init))
5710 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5711 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5712 STRIP_NOPS (fn);
5714 else
5715 fn = NULL;
5717 /* For type inconsistent program we may end up looking up virtual method
5718 in virtual table that does not contain TOKEN entries. We may overrun
5719 the virtual table and pick up a constant or RTTI info pointer.
5720 In any case the call is undefined. */
5721 if (!fn
5722 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5723 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5724 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5725 else
5727 fn = TREE_OPERAND (fn, 0);
5729 /* When cgraph node is missing and function is not public, we cannot
5730 devirtualize. This can happen in WHOPR when the actual method
5731 ends up in other partition, because we found devirtualization
5732 possibility too late. */
5733 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5735 if (can_refer)
5737 *can_refer = false;
5738 return fn;
5740 return NULL_TREE;
5744 /* Make sure we create a cgraph node for functions we'll reference.
5745 They can be non-existent if the reference comes from an entry
5746 of an external vtable for example. */
5747 cgraph_node::get_create (fn);
5749 return fn;
5752 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5753 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5754 KNOWN_BINFO carries the binfo describing the true type of
5755 OBJ_TYPE_REF_OBJECT(REF).
5756 Set CAN_REFER if non-NULL to false if method
5757 is not referable or if the virtual table is ill-formed (such as rewriten
5758 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5760 tree
5761 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5762 bool *can_refer)
5764 unsigned HOST_WIDE_INT offset;
5765 tree v;
5767 v = BINFO_VTABLE (known_binfo);
5768 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5769 if (!v)
5770 return NULL_TREE;
5772 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5774 if (can_refer)
5775 *can_refer = false;
5776 return NULL_TREE;
5778 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5781 /* Given a pointer value OP0, return a simplified version of an
5782 indirection through OP0, or NULL_TREE if no simplification is
5783 possible. Note that the resulting type may be different from
5784 the type pointed to in the sense that it is still compatible
5785 from the langhooks point of view. */
5787 tree
5788 gimple_fold_indirect_ref (tree t)
5790 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5791 tree sub = t;
5792 tree subtype;
5794 STRIP_NOPS (sub);
5795 subtype = TREE_TYPE (sub);
5796 if (!POINTER_TYPE_P (subtype))
5797 return NULL_TREE;
5799 if (TREE_CODE (sub) == ADDR_EXPR)
5801 tree op = TREE_OPERAND (sub, 0);
5802 tree optype = TREE_TYPE (op);
5803 /* *&p => p */
5804 if (useless_type_conversion_p (type, optype))
5805 return op;
5807 /* *(foo *)&fooarray => fooarray[0] */
5808 if (TREE_CODE (optype) == ARRAY_TYPE
5809 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5810 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5812 tree type_domain = TYPE_DOMAIN (optype);
5813 tree min_val = size_zero_node;
5814 if (type_domain && TYPE_MIN_VALUE (type_domain))
5815 min_val = TYPE_MIN_VALUE (type_domain);
5816 if (TREE_CODE (min_val) == INTEGER_CST)
5817 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5819 /* *(foo *)&complexfoo => __real__ complexfoo */
5820 else if (TREE_CODE (optype) == COMPLEX_TYPE
5821 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5822 return fold_build1 (REALPART_EXPR, type, op);
5823 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5824 else if (TREE_CODE (optype) == VECTOR_TYPE
5825 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5827 tree part_width = TYPE_SIZE (type);
5828 tree index = bitsize_int (0);
5829 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5833 /* *(p + CST) -> ... */
5834 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5835 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5837 tree addr = TREE_OPERAND (sub, 0);
5838 tree off = TREE_OPERAND (sub, 1);
5839 tree addrtype;
5841 STRIP_NOPS (addr);
5842 addrtype = TREE_TYPE (addr);
5844 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5845 if (TREE_CODE (addr) == ADDR_EXPR
5846 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5847 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5848 && tree_fits_uhwi_p (off))
5850 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5851 tree part_width = TYPE_SIZE (type);
5852 unsigned HOST_WIDE_INT part_widthi
5853 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5854 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5855 tree index = bitsize_int (indexi);
5856 if (offset / part_widthi
5857 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5858 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5859 part_width, index);
5862 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5863 if (TREE_CODE (addr) == ADDR_EXPR
5864 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5865 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5867 tree size = TYPE_SIZE_UNIT (type);
5868 if (tree_int_cst_equal (size, off))
5869 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5872 /* *(p + CST) -> MEM_REF <p, CST>. */
5873 if (TREE_CODE (addr) != ADDR_EXPR
5874 || DECL_P (TREE_OPERAND (addr, 0)))
5875 return fold_build2 (MEM_REF, type,
5876 addr,
5877 wide_int_to_tree (ptype, off));
5880 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5881 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5882 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5883 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5885 tree type_domain;
5886 tree min_val = size_zero_node;
5887 tree osub = sub;
5888 sub = gimple_fold_indirect_ref (sub);
5889 if (! sub)
5890 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5891 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5892 if (type_domain && TYPE_MIN_VALUE (type_domain))
5893 min_val = TYPE_MIN_VALUE (type_domain);
5894 if (TREE_CODE (min_val) == INTEGER_CST)
5895 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5898 return NULL_TREE;
5901 /* Return true if CODE is an operation that when operating on signed
5902 integer types involves undefined behavior on overflow and the
5903 operation can be expressed with unsigned arithmetic. */
5905 bool
5906 arith_code_with_undefined_signed_overflow (tree_code code)
5908 switch (code)
5910 case PLUS_EXPR:
5911 case MINUS_EXPR:
5912 case MULT_EXPR:
5913 case NEGATE_EXPR:
5914 case POINTER_PLUS_EXPR:
5915 return true;
5916 default:
5917 return false;
5921 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5922 operation that can be transformed to unsigned arithmetic by converting
5923 its operand, carrying out the operation in the corresponding unsigned
5924 type and converting the result back to the original type.
5926 Returns a sequence of statements that replace STMT and also contain
5927 a modified form of STMT itself. */
5929 gimple_seq
5930 rewrite_to_defined_overflow (gimple *stmt)
5932 if (dump_file && (dump_flags & TDF_DETAILS))
5934 fprintf (dump_file, "rewriting stmt with undefined signed "
5935 "overflow ");
5936 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5939 tree lhs = gimple_assign_lhs (stmt);
5940 tree type = unsigned_type_for (TREE_TYPE (lhs));
5941 gimple_seq stmts = NULL;
5942 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
5944 tree op = gimple_op (stmt, i);
5945 op = gimple_convert (&stmts, type, op);
5946 gimple_set_op (stmt, i, op);
5948 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
5949 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
5950 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
5951 gimple_seq_add_stmt (&stmts, stmt);
5952 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
5953 gimple_seq_add_stmt (&stmts, cvt);
5955 return stmts;
5959 /* The valueization hook we use for the gimple_build API simplification.
5960 This makes us match fold_buildN behavior by only combining with
5961 statements in the sequence(s) we are currently building. */
5963 static tree
5964 gimple_build_valueize (tree op)
5966 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
5967 return op;
5968 return NULL_TREE;
5971 /* Build the expression CODE OP0 of type TYPE with location LOC,
5972 simplifying it first if possible. Returns the built
5973 expression value and appends statements possibly defining it
5974 to SEQ. */
5976 tree
5977 gimple_build (gimple_seq *seq, location_t loc,
5978 enum tree_code code, tree type, tree op0)
5980 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
5981 if (!res)
5983 if (gimple_in_ssa_p (cfun))
5984 res = make_ssa_name (type);
5985 else
5986 res = create_tmp_reg (type);
5987 gimple *stmt;
5988 if (code == REALPART_EXPR
5989 || code == IMAGPART_EXPR
5990 || code == VIEW_CONVERT_EXPR)
5991 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
5992 else
5993 stmt = gimple_build_assign (res, code, op0);
5994 gimple_set_location (stmt, loc);
5995 gimple_seq_add_stmt_without_update (seq, stmt);
5997 return res;
6000 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6001 simplifying it first if possible. Returns the built
6002 expression value and appends statements possibly defining it
6003 to SEQ. */
6005 tree
6006 gimple_build (gimple_seq *seq, location_t loc,
6007 enum tree_code code, tree type, tree op0, tree op1)
6009 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6010 if (!res)
6012 if (gimple_in_ssa_p (cfun))
6013 res = make_ssa_name (type);
6014 else
6015 res = create_tmp_reg (type);
6016 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6017 gimple_set_location (stmt, loc);
6018 gimple_seq_add_stmt_without_update (seq, stmt);
6020 return res;
6023 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6024 simplifying it first if possible. Returns the built
6025 expression value and appends statements possibly defining it
6026 to SEQ. */
6028 tree
6029 gimple_build (gimple_seq *seq, location_t loc,
6030 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6032 tree res = gimple_simplify (code, type, op0, op1, op2,
6033 seq, gimple_build_valueize);
6034 if (!res)
6036 if (gimple_in_ssa_p (cfun))
6037 res = make_ssa_name (type);
6038 else
6039 res = create_tmp_reg (type);
6040 gimple *stmt;
6041 if (code == BIT_FIELD_REF)
6042 stmt = gimple_build_assign (res, code,
6043 build3 (code, type, op0, op1, op2));
6044 else
6045 stmt = gimple_build_assign (res, code, op0, op1, op2);
6046 gimple_set_location (stmt, loc);
6047 gimple_seq_add_stmt_without_update (seq, stmt);
6049 return res;
6052 /* Build the call FN (ARG0) with a result of type TYPE
6053 (or no result if TYPE is void) with location LOC,
6054 simplifying it first if possible. Returns the built
6055 expression value (or NULL_TREE if TYPE is void) and appends
6056 statements possibly defining it to SEQ. */
6058 tree
6059 gimple_build (gimple_seq *seq, location_t loc,
6060 enum built_in_function fn, tree type, tree arg0)
6062 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6063 if (!res)
6065 tree decl = builtin_decl_implicit (fn);
6066 gimple *stmt = gimple_build_call (decl, 1, arg0);
6067 if (!VOID_TYPE_P (type))
6069 if (gimple_in_ssa_p (cfun))
6070 res = make_ssa_name (type);
6071 else
6072 res = create_tmp_reg (type);
6073 gimple_call_set_lhs (stmt, res);
6075 gimple_set_location (stmt, loc);
6076 gimple_seq_add_stmt_without_update (seq, stmt);
6078 return res;
6081 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6082 (or no result if TYPE is void) with location LOC,
6083 simplifying it first if possible. Returns the built
6084 expression value (or NULL_TREE if TYPE is void) and appends
6085 statements possibly defining it to SEQ. */
6087 tree
6088 gimple_build (gimple_seq *seq, location_t loc,
6089 enum built_in_function fn, tree type, tree arg0, tree arg1)
6091 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6092 if (!res)
6094 tree decl = builtin_decl_implicit (fn);
6095 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6096 if (!VOID_TYPE_P (type))
6098 if (gimple_in_ssa_p (cfun))
6099 res = make_ssa_name (type);
6100 else
6101 res = create_tmp_reg (type);
6102 gimple_call_set_lhs (stmt, res);
6104 gimple_set_location (stmt, loc);
6105 gimple_seq_add_stmt_without_update (seq, stmt);
6107 return res;
6110 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6111 (or no result if TYPE is void) with location LOC,
6112 simplifying it first if possible. Returns the built
6113 expression value (or NULL_TREE if TYPE is void) and appends
6114 statements possibly defining it to SEQ. */
6116 tree
6117 gimple_build (gimple_seq *seq, location_t loc,
6118 enum built_in_function fn, tree type,
6119 tree arg0, tree arg1, tree arg2)
6121 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6122 seq, gimple_build_valueize);
6123 if (!res)
6125 tree decl = builtin_decl_implicit (fn);
6126 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6127 if (!VOID_TYPE_P (type))
6129 if (gimple_in_ssa_p (cfun))
6130 res = make_ssa_name (type);
6131 else
6132 res = create_tmp_reg (type);
6133 gimple_call_set_lhs (stmt, res);
6135 gimple_set_location (stmt, loc);
6136 gimple_seq_add_stmt_without_update (seq, stmt);
6138 return res;
6141 /* Build the conversion (TYPE) OP with a result of type TYPE
6142 with location LOC if such conversion is neccesary in GIMPLE,
6143 simplifying it first.
6144 Returns the built expression value and appends
6145 statements possibly defining it to SEQ. */
6147 tree
6148 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6150 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6151 return op;
6152 return gimple_build (seq, loc, NOP_EXPR, type, op);
6155 /* Build the conversion (ptrofftype) OP with a result of a type
6156 compatible with ptrofftype with location LOC if such conversion
6157 is neccesary in GIMPLE, simplifying it first.
6158 Returns the built expression value and appends
6159 statements possibly defining it to SEQ. */
6161 tree
6162 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6164 if (ptrofftype_p (TREE_TYPE (op)))
6165 return op;
6166 return gimple_convert (seq, loc, sizetype, op);
6169 /* Return true if the result of assignment STMT is known to be non-negative.
6170 If the return value is based on the assumption that signed overflow is
6171 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6172 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6174 static bool
6175 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6176 int depth)
6178 enum tree_code code = gimple_assign_rhs_code (stmt);
6179 switch (get_gimple_rhs_class (code))
6181 case GIMPLE_UNARY_RHS:
6182 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6183 gimple_expr_type (stmt),
6184 gimple_assign_rhs1 (stmt),
6185 strict_overflow_p, depth);
6186 case GIMPLE_BINARY_RHS:
6187 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6188 gimple_expr_type (stmt),
6189 gimple_assign_rhs1 (stmt),
6190 gimple_assign_rhs2 (stmt),
6191 strict_overflow_p, depth);
6192 case GIMPLE_TERNARY_RHS:
6193 return false;
6194 case GIMPLE_SINGLE_RHS:
6195 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6196 strict_overflow_p, depth);
6197 case GIMPLE_INVALID_RHS:
6198 break;
6200 gcc_unreachable ();
6203 /* Return true if return value of call STMT is known to be non-negative.
6204 If the return value is based on the assumption that signed overflow is
6205 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6206 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6208 static bool
6209 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6210 int depth)
6212 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6213 gimple_call_arg (stmt, 0) : NULL_TREE;
6214 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6215 gimple_call_arg (stmt, 1) : NULL_TREE;
6217 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6218 gimple_call_combined_fn (stmt),
6219 arg0,
6220 arg1,
6221 strict_overflow_p, depth);
6224 /* Return true if return value of call STMT is known to be non-negative.
6225 If the return value is based on the assumption that signed overflow is
6226 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6227 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6229 static bool
6230 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6231 int depth)
6233 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6235 tree arg = gimple_phi_arg_def (stmt, i);
6236 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6237 return false;
6239 return true;
6242 /* Return true if STMT is known to compute a non-negative value.
6243 If the return value is based on the assumption that signed overflow is
6244 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6245 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6247 bool
6248 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6249 int depth)
6251 switch (gimple_code (stmt))
6253 case GIMPLE_ASSIGN:
6254 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6255 depth);
6256 case GIMPLE_CALL:
6257 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6258 depth);
6259 case GIMPLE_PHI:
6260 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6261 depth);
6262 default:
6263 return false;
6267 /* Return true if the floating-point value computed by assignment STMT
6268 is known to have an integer value. We also allow +Inf, -Inf and NaN
6269 to be considered integer values.
6271 DEPTH is the current nesting depth of the query. */
6273 static bool
6274 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6276 enum tree_code code = gimple_assign_rhs_code (stmt);
6277 switch (get_gimple_rhs_class (code))
6279 case GIMPLE_UNARY_RHS:
6280 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6281 gimple_assign_rhs1 (stmt), depth);
6282 case GIMPLE_BINARY_RHS:
6283 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6284 gimple_assign_rhs1 (stmt),
6285 gimple_assign_rhs2 (stmt), depth);
6286 case GIMPLE_TERNARY_RHS:
6287 return false;
6288 case GIMPLE_SINGLE_RHS:
6289 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6290 case GIMPLE_INVALID_RHS:
6291 break;
6293 gcc_unreachable ();
6296 /* Return true if the floating-point value computed by call STMT is known
6297 to have an integer value. We also allow +Inf, -Inf and NaN to be
6298 considered integer values.
6300 DEPTH is the current nesting depth of the query. */
6302 static bool
6303 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6305 tree arg0 = (gimple_call_num_args (stmt) > 0
6306 ? gimple_call_arg (stmt, 0)
6307 : NULL_TREE);
6308 tree arg1 = (gimple_call_num_args (stmt) > 1
6309 ? gimple_call_arg (stmt, 1)
6310 : NULL_TREE);
6311 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
6312 arg0, arg1, depth);
6315 /* Return true if the floating-point result of phi STMT is known to have
6316 an integer value. We also allow +Inf, -Inf and NaN to be considered
6317 integer values.
6319 DEPTH is the current nesting depth of the query. */
6321 static bool
6322 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6324 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6326 tree arg = gimple_phi_arg_def (stmt, i);
6327 if (!integer_valued_real_single_p (arg, depth + 1))
6328 return false;
6330 return true;
6333 /* Return true if the floating-point value computed by STMT is known
6334 to have an integer value. We also allow +Inf, -Inf and NaN to be
6335 considered integer values.
6337 DEPTH is the current nesting depth of the query. */
6339 bool
6340 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6342 switch (gimple_code (stmt))
6344 case GIMPLE_ASSIGN:
6345 return gimple_assign_integer_valued_real_p (stmt, depth);
6346 case GIMPLE_CALL:
6347 return gimple_call_integer_valued_real_p (stmt, depth);
6348 case GIMPLE_PHI:
6349 return gimple_phi_integer_valued_real_p (stmt, depth);
6350 default:
6351 return false;