2014-12-08 Richard Biener <rguenther@suse.de>
[official-gcc.git] / gcc / gimple-fold.c
blobe71e0954f6cab2a140158a450076bd2e492f6228
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "stringpool.h"
27 #include "expr.h"
28 #include "stmt.h"
29 #include "stor-layout.h"
30 #include "flags.h"
31 #include "hashtab.h"
32 #include "hash-set.h"
33 #include "vec.h"
34 #include "machmode.h"
35 #include "hard-reg-set.h"
36 #include "input.h"
37 #include "function.h"
38 #include "dumpfile.h"
39 #include "bitmap.h"
40 #include "predict.h"
41 #include "dominance.h"
42 #include "basic-block.h"
43 #include "tree-ssa-alias.h"
44 #include "internal-fn.h"
45 #include "gimple-fold.h"
46 #include "gimple-expr.h"
47 #include "is-a.h"
48 #include "gimple.h"
49 #include "gimplify.h"
50 #include "gimple-iterator.h"
51 #include "gimple-ssa.h"
52 #include "tree-ssanames.h"
53 #include "tree-into-ssa.h"
54 #include "tree-dfa.h"
55 #include "tree-ssa.h"
56 #include "tree-ssa-propagate.h"
57 #include "target.h"
58 #include "hash-map.h"
59 #include "plugin-api.h"
60 #include "ipa-ref.h"
61 #include "cgraph.h"
62 #include "ipa-utils.h"
63 #include "gimple-pretty-print.h"
64 #include "tree-ssa-address.h"
65 #include "langhooks.h"
66 #include "gimplify-me.h"
67 #include "dbgcnt.h"
68 #include "builtins.h"
69 #include "output.h"
70 #include "tree-eh.h"
71 #include "gimple-match.h"
72 #include "tree-phinodes.h"
73 #include "ssa-iterators.h"
75 /* Return true when DECL can be referenced from current unit.
76 FROM_DECL (if non-null) specifies constructor of variable DECL was taken from.
77 We can get declarations that are not possible to reference for various
78 reasons:
80 1) When analyzing C++ virtual tables.
81 C++ virtual tables do have known constructors even
82 when they are keyed to other compilation unit.
83 Those tables can contain pointers to methods and vars
84 in other units. Those methods have both STATIC and EXTERNAL
85 set.
86 2) In WHOPR mode devirtualization might lead to reference
87 to method that was partitioned elsewhere.
88 In this case we have static VAR_DECL or FUNCTION_DECL
89 that has no corresponding callgraph/varpool node
90 declaring the body.
91 3) COMDAT functions referred by external vtables that
92 we devirtualize only during final compilation stage.
93 At this time we already decided that we will not output
94 the function body and thus we can't reference the symbol
95 directly. */
97 static bool
98 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
100 varpool_node *vnode;
101 struct cgraph_node *node;
102 symtab_node *snode;
104 if (DECL_ABSTRACT_P (decl))
105 return false;
107 /* We are concerned only about static/external vars and functions. */
108 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
109 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
110 return true;
112 /* Static objects can be referred only if they were not optimized out yet. */
113 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
115 /* Before we start optimizing unreachable code we can be sure all
116 static objects are defined. */
117 if (symtab->function_flags_ready)
118 return true;
119 snode = symtab_node::get (decl);
120 if (!snode || !snode->definition)
121 return false;
122 node = dyn_cast <cgraph_node *> (snode);
123 /* A function inlined everywhere has no standalone body to refer to. */
124 return !node || !node->global.inlined_to;
126 /* We will later output the initializer, so we can refer to it.
127 So we are concerned only when DECL comes from initializer of
128 external var or var that has been optimized out. */
129 if (!from_decl
130 || TREE_CODE (from_decl) != VAR_DECL
131 || (!DECL_EXTERNAL (from_decl)
132 && (vnode = varpool_node::get (from_decl)) != NULL
133 && vnode->definition)
134 || (flag_ltrans
135 && (vnode = varpool_node::get (from_decl)) != NULL
136 && vnode->in_other_partition))
137 return true;
138 /* We are folding reference from external vtable. The vtable may refer
139 to a symbol keyed to other compilation unit. The other compilation
140 unit may be in separate DSO and the symbol may be hidden. */
141 if (DECL_VISIBILITY_SPECIFIED (decl)
142 && DECL_EXTERNAL (decl)
143 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
144 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
145 return false;
146 /* When function is public, we always can introduce new reference.
147 Exception are the COMDAT functions where introducing a direct
148 reference imply need to include function body in the current unit. */
149 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
150 return true;
151 /* We have COMDAT. We are going to check if we still have definition
152 or if the definition is going to be output in other partition.
153 Bypass this when gimplifying; all needed functions will be produced.
155 As observed in PR20991 for already optimized out comdat virtual functions
156 it may be tempting to not necessarily give up because the copy will be
157 output elsewhere when corresponding vtable is output.
158 This is however not possible - ABI specify that COMDATs are output in
159 units where they are used and when the other unit was compiled with LTO
160 it is possible that vtable was kept public while the function itself
161 was privatized. */
162 if (!symtab->function_flags_ready)
163 return true;
165 snode = symtab_node::get (decl);
166 if (!snode
167 || ((!snode->definition || DECL_EXTERNAL (decl))
168 && (!snode->in_other_partition
169 || (!snode->forced_by_abi && !snode->force_output))))
170 return false;
171 node = dyn_cast <cgraph_node *> (snode);
172 return !node || !node->global.inlined_to;
175 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
176 acceptable form for is_gimple_min_invariant.
177 FROM_DECL (if non-NULL) specifies variable whose constructor contains CVAL.
178 Returns the canonicalized value, or NULL_TREE when CVAL refers to a
179 declaration that cannot be referenced from the current unit. */
179 tree
180 canonicalize_constructor_val (tree cval, tree from_decl)
182 tree orig_cval = cval;
183 STRIP_NOPS (cval);
184 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
185 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
187 tree ptr = TREE_OPERAND (cval, 0);
188 if (is_gimple_min_invariant (ptr))
189 cval = build1_loc (EXPR_LOCATION (cval),
190 ADDR_EXPR, TREE_TYPE (ptr),
191 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
192 ptr,
193 fold_convert (ptr_type_node,
194 TREE_OPERAND (cval, 1))));
196 if (TREE_CODE (cval) == ADDR_EXPR)
198 tree base = NULL_TREE;
199 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
201 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
202 if (base)
203 TREE_OPERAND (cval, 0) = base;
205 else
206 base = get_base_address (TREE_OPERAND (cval, 0));
207 if (!base)
208 return NULL_TREE;
210 if ((TREE_CODE (base) == VAR_DECL
211 || TREE_CODE (base) == FUNCTION_DECL)
212 && !can_refer_decl_in_current_unit_p (base, from_decl))
213 return NULL_TREE;
214 if (TREE_CODE (base) == VAR_DECL)
215 TREE_ADDRESSABLE (base) = 1;
216 else if (TREE_CODE (base) == FUNCTION_DECL)
218 /* Make sure we create a cgraph node for functions we'll reference.
219 They can be non-existent if the reference comes from an entry
220 of an external vtable for example. */
221 cgraph_node::get_create (base);
223 /* Fixup types in global initializers. */
224 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
225 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
227 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
228 cval = fold_convert (TREE_TYPE (orig_cval), cval);
229 return cval;
231 if (TREE_OVERFLOW_P (cval))
232 return drop_tree_overflow (cval);
233 return orig_cval;
236 /* If SYM is a constant variable with known value, return the value.
237 NULL_TREE is returned otherwise. */
239 tree
240 get_symbol_constant_value (tree sym)
242 tree val = ctor_for_folding (sym);
243 if (val != error_mark_node)
245 if (val)
247 val = canonicalize_constructor_val (unshare_expr (val), sym);
248 if (val && is_gimple_min_invariant (val))
249 return val;
250 else
251 return NULL_TREE;
253 /* Variables declared 'const' without an initializer
254 have zero as the initializer if they may not be
255 overridden at link or run time. */
256 if (!val
257 && is_gimple_reg_type (TREE_TYPE (sym)))
258 return build_zero_cst (TREE_TYPE (sym));
261 return NULL_TREE;
266 /* Subroutine of fold_stmt. We perform several simplifications of the
267 memory reference tree EXPR and make sure to re-gimplify them properly
268 after propagation of constant addresses. IS_LHS is true if the
269 reference is supposed to be an lvalue. */
271 static tree
272 maybe_fold_reference (tree expr, bool is_lhs)
274 tree result;
276 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
277 || TREE_CODE (expr) == REALPART_EXPR
278 || TREE_CODE (expr) == IMAGPART_EXPR)
279 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
280 return fold_unary_loc (EXPR_LOCATION (expr),
281 TREE_CODE (expr),
282 TREE_TYPE (expr),
283 TREE_OPERAND (expr, 0));
284 else if (TREE_CODE (expr) == BIT_FIELD_REF
285 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
286 return fold_ternary_loc (EXPR_LOCATION (expr),
287 TREE_CODE (expr),
288 TREE_TYPE (expr),
289 TREE_OPERAND (expr, 0),
290 TREE_OPERAND (expr, 1),
291 TREE_OPERAND (expr, 2));
293 if (!is_lhs
294 && (result = fold_const_aggregate_ref (expr))
295 && is_gimple_min_invariant (result))
296 return result;
298 return NULL_TREE;
302 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
303 replacement rhs for the statement or NULL_TREE if no simplification
304 could be made. It is assumed that the operands have been previously
305 folded. */
307 static tree
308 fold_gimple_assign (gimple_stmt_iterator *si)
310 gimple stmt = gsi_stmt (*si);
311 enum tree_code subcode = gimple_assign_rhs_code (stmt);
312 location_t loc = gimple_location (stmt);
314 tree result = NULL_TREE;
316 switch (get_gimple_rhs_class (subcode))
318 case GIMPLE_SINGLE_RHS:
320 tree rhs = gimple_assign_rhs1 (stmt);
322 if (TREE_CLOBBER_P (rhs))
323 return NULL_TREE;
325 if (REFERENCE_CLASS_P (rhs))
326 return maybe_fold_reference (rhs, false);
328 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
330 tree val = OBJ_TYPE_REF_EXPR (rhs);
331 if (is_gimple_min_invariant (val))
332 return val;
333 else if (flag_devirtualize && virtual_method_call_p (rhs))
335 bool final;
336 vec <cgraph_node *>targets
337 = possible_polymorphic_call_targets (rhs, stmt, &final);
338 if (final && targets.length () <= 1 && dbg_cnt (devirt))
340 if (dump_enabled_p ())
342 location_t loc = gimple_location_safe (stmt);
343 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
344 "resolving virtual function address "
345 "reference to function %s\n",
346 targets.length () == 1
347 ? targets[0]->name ()
348 : "NULL");
350 if (targets.length () == 1)
352 val = fold_convert (TREE_TYPE (val),
353 build_fold_addr_expr_loc
354 (loc, targets[0]->decl));
355 STRIP_USELESS_TYPE_CONVERSION (val);
357 else
358 /* We can not use __builtin_unreachable here because it
359 can not have address taken. */
360 val = build_int_cst (TREE_TYPE (val), 0);
361 return val;
366 else if (TREE_CODE (rhs) == ADDR_EXPR)
368 tree ref = TREE_OPERAND (rhs, 0);
369 tree tem = maybe_fold_reference (ref, true);
370 if (tem
371 && TREE_CODE (tem) == MEM_REF
372 && integer_zerop (TREE_OPERAND (tem, 1)))
373 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
374 else if (tem)
375 result = fold_convert (TREE_TYPE (rhs),
376 build_fold_addr_expr_loc (loc, tem));
377 else if (TREE_CODE (ref) == MEM_REF
378 && integer_zerop (TREE_OPERAND (ref, 1)))
379 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
382 else if (TREE_CODE (rhs) == CONSTRUCTOR
383 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
384 && (CONSTRUCTOR_NELTS (rhs)
385 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
387 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
388 unsigned i;
389 tree val;
391 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
392 if (TREE_CODE (val) != INTEGER_CST
393 && TREE_CODE (val) != REAL_CST
394 && TREE_CODE (val) != FIXED_CST)
395 return NULL_TREE;
397 return build_vector_from_ctor (TREE_TYPE (rhs),
398 CONSTRUCTOR_ELTS (rhs));
401 else if (DECL_P (rhs))
402 return get_symbol_constant_value (rhs);
404 /* If we couldn't fold the RHS, hand over to the generic
405 fold routines. */
406 if (result == NULL_TREE)
407 result = fold (rhs);
409 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
410 that may have been added by fold, and "useless" type
411 conversions that might now be apparent due to propagation. */
412 STRIP_USELESS_TYPE_CONVERSION (result);
414 if (result != rhs && valid_gimple_rhs_p (result))
415 return result;
417 return NULL_TREE;
419 break;
421 case GIMPLE_UNARY_RHS:
422 break;
424 case GIMPLE_BINARY_RHS:
425 /* Try to canonicalize for boolean-typed X the comparisons
426 X == 0, X == 1, X != 0, and X != 1. */
427 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
428 || gimple_assign_rhs_code (stmt) == NE_EXPR)
430 tree lhs = gimple_assign_lhs (stmt);
431 tree op1 = gimple_assign_rhs1 (stmt);
432 tree op2 = gimple_assign_rhs2 (stmt);
433 tree type = TREE_TYPE (op1);
435 /* Check whether the comparison operands are of the same boolean
436 type as the result type is.
437 Check that second operand is an integer-constant with value
438 one or zero. */
439 if (TREE_CODE (op2) == INTEGER_CST
440 && (integer_zerop (op2) || integer_onep (op2))
441 && useless_type_conversion_p (TREE_TYPE (lhs), type))
443 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
444 bool is_logical_not = false;
446 /* X == 0 and X != 1 is a logical-not of X
447 X == 1 and X != 0 is X */
448 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
449 || (cmp_code == NE_EXPR && integer_onep (op2)))
450 is_logical_not = true;
452 if (is_logical_not == false)
453 result = op1;
454 /* Only for one-bit precision typed X the transformation
455 !X -> ~X is valid. */
456 else if (TYPE_PRECISION (type) == 1)
457 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
458 type, op1);
459 /* Otherwise we use !X -> X ^ 1. */
460 else
461 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
462 type, op1, build_int_cst (type, 1));
467 if (!result)
468 result = fold_binary_loc (loc, subcode,
469 TREE_TYPE (gimple_assign_lhs (stmt)),
470 gimple_assign_rhs1 (stmt),
471 gimple_assign_rhs2 (stmt));
473 if (result)
475 STRIP_USELESS_TYPE_CONVERSION (result);
476 if (valid_gimple_rhs_p (result))
477 return result;
479 break;
481 case GIMPLE_TERNARY_RHS:
482 /* Try to fold a conditional expression. */
483 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
485 tree op0 = gimple_assign_rhs1 (stmt);
486 tree tem;
487 bool set = false;
488 location_t cond_loc = gimple_location (stmt);
490 if (COMPARISON_CLASS_P (op0))
492 fold_defer_overflow_warnings ();
493 tem = fold_binary_loc (cond_loc,
494 TREE_CODE (op0), TREE_TYPE (op0),
495 TREE_OPERAND (op0, 0),
496 TREE_OPERAND (op0, 1));
497 /* This is actually a conditional expression, not a GIMPLE
498 conditional statement, however, the valid_gimple_rhs_p
499 test still applies. */
500 set = (tem && is_gimple_condexpr (tem)
501 && valid_gimple_rhs_p (tem));
502 fold_undefer_overflow_warnings (set, stmt, 0);
504 else if (is_gimple_min_invariant (op0))
506 tem = op0;
507 set = true;
509 else
510 return NULL_TREE;
512 if (set)
513 result = fold_build3_loc (cond_loc, COND_EXPR,
514 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
515 gimple_assign_rhs2 (stmt),
516 gimple_assign_rhs3 (stmt));
519 if (!result)
520 result = fold_ternary_loc (loc, subcode,
521 TREE_TYPE (gimple_assign_lhs (stmt)),
522 gimple_assign_rhs1 (stmt),
523 gimple_assign_rhs2 (stmt),
524 gimple_assign_rhs3 (stmt));
526 if (result)
528 STRIP_USELESS_TYPE_CONVERSION (result);
529 if (valid_gimple_rhs_p (result))
530 return result;
532 break;
534 case GIMPLE_INVALID_RHS:
535 gcc_unreachable ();
538 return NULL_TREE;
541 /* Attempt to fold a conditional statement. Return true if any changes were
542 made. We only attempt to fold the condition expression, and do not perform
543 any transformation that would require alteration of the cfg. It is
544 assumed that the operands have been previously folded. */
546 static bool
547 fold_gimple_cond (gcond *stmt)
549 tree result = fold_binary_loc (gimple_location (stmt),
550 gimple_cond_code (stmt),
551 boolean_type_node,
552 gimple_cond_lhs (stmt),
553 gimple_cond_rhs (stmt));
555 if (result)
557 STRIP_USELESS_TYPE_CONVERSION (result);
558 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
560 gimple_cond_set_condition_from_tree (stmt, result);
561 return true;
565 return false;
569 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
570 adjusting the replacement stmts' location and virtual operands.
571 If the statement has a lhs the last stmt in the sequence is expected
572 to assign to that lhs. */
574 static void
575 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
577 gimple stmt = gsi_stmt (*si_p);
579 if (gimple_has_location (stmt))
580 annotate_all_with_location (stmts, gimple_location (stmt));
582 /* First iterate over the replacement statements backward, assigning
583 virtual operands to their defining statements. */
584 gimple laststore = NULL;
585 for (gimple_stmt_iterator i = gsi_last (stmts);
586 !gsi_end_p (i); gsi_prev (&i))
588 gimple new_stmt = gsi_stmt (i);
589 if ((gimple_assign_single_p (new_stmt)
590 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
591 || (is_gimple_call (new_stmt)
592 && (gimple_call_flags (new_stmt)
593 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
595 tree vdef;
596 if (!laststore)
597 vdef = gimple_vdef (stmt);
598 else
599 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
600 gimple_set_vdef (new_stmt, vdef);
601 if (vdef && TREE_CODE (vdef) == SSA_NAME)
602 SSA_NAME_DEF_STMT (vdef) = new_stmt;
603 laststore = new_stmt;
607 /* Second iterate over the statements forward, assigning virtual
608 operands to their uses. */
609 tree reaching_vuse = gimple_vuse (stmt);
610 for (gimple_stmt_iterator i = gsi_start (stmts);
611 !gsi_end_p (i); gsi_next (&i))
613 gimple new_stmt = gsi_stmt (i);
614 /* If the new statement possibly has a VUSE, update it with exact SSA
615 name we know will reach this one. */
616 if (gimple_has_mem_ops (new_stmt))
617 gimple_set_vuse (new_stmt, reaching_vuse);
618 gimple_set_modified (new_stmt, true);
619 if (gimple_vdef (new_stmt))
620 reaching_vuse = gimple_vdef (new_stmt);
623 /* If the new sequence does not do a store release the virtual
624 definition of the original statement. */
625 if (reaching_vuse
626 && reaching_vuse == gimple_vuse (stmt))
628 tree vdef = gimple_vdef (stmt);
629 if (vdef
630 && TREE_CODE (vdef) == SSA_NAME)
632 unlink_stmt_vdef (stmt);
633 release_ssa_name (vdef);
637 /* Finally replace the original statement with the sequence. */
638 gsi_replace_with_seq (si_p, stmts, false);
641 /* Convert EXPR into a GIMPLE value suitable for substitution on the
642 RHS of an assignment. Insert the necessary statements before
643 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
644 is replaced. If the call is expected to produce a result, then it
645 is replaced by an assignment of the new RHS to the result variable.
646 If the result is to be ignored, then the call is replaced by a
647 GIMPLE_NOP. A proper VDEF chain is retained by making the first
648 VUSE and the last VDEF of the whole sequence be the same as the replaced
649 statement and using new SSA names for stores in between. */
651 void
652 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
654 tree lhs;
655 gimple stmt, new_stmt;
656 gimple_stmt_iterator i;
657 gimple_seq stmts = NULL;
659 stmt = gsi_stmt (*si_p);
661 gcc_assert (is_gimple_call (stmt));
663 push_gimplify_context (gimple_in_ssa_p (cfun));
665 lhs = gimple_call_lhs (stmt);
666 if (lhs == NULL_TREE)
668 gimplify_and_add (expr, &stmts);
669 /* We can end up with folding a memcpy of an empty class assignment
670 which gets optimized away by C++ gimplification. */
671 if (gimple_seq_empty_p (stmts))
673 pop_gimplify_context (NULL);
674 if (gimple_in_ssa_p (cfun))
676 unlink_stmt_vdef (stmt);
677 release_defs (stmt);
679 gsi_replace (si_p, gimple_build_nop (), true);
680 return;
683 else
685 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
686 new_stmt = gimple_build_assign (lhs, tmp);
687 i = gsi_last (stmts);
688 gsi_insert_after_without_update (&i, new_stmt,
689 GSI_CONTINUE_LINKING);
692 pop_gimplify_context (NULL);
694 gsi_replace_with_seq_vops (si_p, stmts);
698 /* Replace the call at *GSI with the gimple value VAL. */
700 static void
701 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
703 gimple stmt = gsi_stmt (*gsi);
704 tree lhs = gimple_call_lhs (stmt);
705 gimple repl;
706 if (lhs)
708 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
709 val = fold_convert (TREE_TYPE (lhs), val);
710 repl = gimple_build_assign (lhs, val);
712 else
713 repl = gimple_build_nop ();
714 tree vdef = gimple_vdef (stmt);
715 if (vdef && TREE_CODE (vdef) == SSA_NAME)
717 unlink_stmt_vdef (stmt);
718 release_ssa_name (vdef);
720 gsi_replace (gsi, repl, true);
723 /* Replace the call at *GSI with the new call REPL and fold that
724 again. */
726 static void
727 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
729 gimple stmt = gsi_stmt (*gsi);
730 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
731 gimple_set_location (repl, gimple_location (stmt));
732 if (gimple_vdef (stmt)
733 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
735 gimple_set_vdef (repl, gimple_vdef (stmt));
736 gimple_set_vuse (repl, gimple_vuse (stmt));
737 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
739 gsi_replace (gsi, repl, true);
740 fold_stmt (gsi);
743 /* Return true if VAR is a VAR_DECL or a component thereof. */
745 static bool
746 var_decl_component_p (tree var)
748 tree inner = var;
749 while (handled_component_p (inner))
750 inner = TREE_OPERAND (inner, 0);
751 return SSA_VAR_P (inner);
754 /* Fold function call to builtin mem{{,p}cpy,move}. Return
755 NULL_TREE if no simplification can be made.
756 If ENDP is 0, return DEST (like memcpy).
757 If ENDP is 1, return DEST+LEN (like mempcpy).
758 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
759 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
760 (memmove). */
762 static bool
763 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
764 tree dest, tree src, int endp)
766 gimple stmt = gsi_stmt (*gsi);
767 tree lhs = gimple_call_lhs (stmt);
768 tree len = gimple_call_arg (stmt, 2);
769 tree destvar, srcvar;
770 location_t loc = gimple_location (stmt);
772 /* If the LEN parameter is zero, return DEST. */
773 if (integer_zerop (len))
775 gimple repl;
776 if (gimple_call_lhs (stmt))
777 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
778 else
779 repl = gimple_build_nop ();
780 tree vdef = gimple_vdef (stmt);
781 if (vdef && TREE_CODE (vdef) == SSA_NAME)
783 unlink_stmt_vdef (stmt);
784 release_ssa_name (vdef);
786 gsi_replace (gsi, repl, true);
787 return true;
790 /* If SRC and DEST are the same (and not volatile), return
791 DEST{,+LEN,+LEN-1}. */
792 if (operand_equal_p (src, dest, 0))
794 unlink_stmt_vdef (stmt);
795 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
796 release_ssa_name (gimple_vdef (stmt));
797 if (!lhs)
799 gsi_replace (gsi, gimple_build_nop (), true);
800 return true;
802 goto done;
804 else
806 tree srctype, desttype;
807 unsigned int src_align, dest_align;
808 tree off0;
810 /* Build accesses at offset zero with a ref-all character type. */
811 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
812 ptr_mode, true), 0);
814 /* If we can perform the copy efficiently with first doing all loads
815 and then all stores inline it that way. Currently efficiently
816 means that we can load all the memory into a single integer
817 register which is what MOVE_MAX gives us. */
818 src_align = get_pointer_alignment (src);
819 dest_align = get_pointer_alignment (dest);
820 if (tree_fits_uhwi_p (len)
821 && compare_tree_int (len, MOVE_MAX) <= 0
822 /* ??? Don't transform copies from strings with known length this
823 confuses the tree-ssa-strlen.c. This doesn't handle
824 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
825 reason. */
826 && !c_strlen (src, 2))
828 unsigned ilen = tree_to_uhwi (len);
829 if (exact_log2 (ilen) != -1)
831 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
832 if (type
833 && TYPE_MODE (type) != BLKmode
834 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
835 == ilen * 8)
836 /* If the destination pointer is not aligned we must be able
837 to emit an unaligned store. */
838 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
839 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
841 tree srctype = type;
842 tree desttype = type;
843 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
844 srctype = build_aligned_type (type, src_align);
845 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
846 tree tem = fold_const_aggregate_ref (srcmem);
847 if (tem)
848 srcmem = tem;
849 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
850 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
851 src_align))
852 srcmem = NULL_TREE;
853 if (srcmem)
855 gimple new_stmt;
856 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
858 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
859 if (gimple_in_ssa_p (cfun))
860 srcmem = make_ssa_name (TREE_TYPE (srcmem),
861 new_stmt);
862 else
863 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
864 gimple_assign_set_lhs (new_stmt, srcmem);
865 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
866 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
868 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
869 desttype = build_aligned_type (type, dest_align);
870 new_stmt
871 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
872 dest, off0),
873 srcmem);
874 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
875 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
876 if (gimple_vdef (new_stmt)
877 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
878 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
879 if (!lhs)
881 gsi_replace (gsi, new_stmt, true);
882 return true;
884 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
885 goto done;
891 if (endp == 3)
893 /* Both DEST and SRC must be pointer types.
894 ??? This is what old code did. Is the testing for pointer types
895 really mandatory?
897 If either SRC is readonly or length is 1, we can use memcpy. */
898 if (!dest_align || !src_align)
899 return false;
900 if (readonly_data_expr (src)
901 || (tree_fits_uhwi_p (len)
902 && (MIN (src_align, dest_align) / BITS_PER_UNIT
903 >= tree_to_uhwi (len))))
905 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
906 if (!fn)
907 return false;
908 gimple_call_set_fndecl (stmt, fn);
909 gimple_call_set_arg (stmt, 0, dest);
910 gimple_call_set_arg (stmt, 1, src);
911 fold_stmt (gsi);
912 return true;
915 /* If *src and *dest can't overlap, optimize into memcpy as well. */
916 if (TREE_CODE (src) == ADDR_EXPR
917 && TREE_CODE (dest) == ADDR_EXPR)
919 tree src_base, dest_base, fn;
920 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
921 HOST_WIDE_INT size = -1;
922 HOST_WIDE_INT maxsize = -1;
924 srcvar = TREE_OPERAND (src, 0);
925 src_base = get_ref_base_and_extent (srcvar, &src_offset,
926 &size, &maxsize);
927 destvar = TREE_OPERAND (dest, 0);
928 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
929 &size, &maxsize);
930 if (tree_fits_uhwi_p (len))
931 maxsize = tree_to_uhwi (len);
932 else
933 maxsize = -1;
934 src_offset /= BITS_PER_UNIT;
935 dest_offset /= BITS_PER_UNIT;
936 if (SSA_VAR_P (src_base)
937 && SSA_VAR_P (dest_base))
939 if (operand_equal_p (src_base, dest_base, 0)
940 && ranges_overlap_p (src_offset, maxsize,
941 dest_offset, maxsize))
942 return false;
944 else if (TREE_CODE (src_base) == MEM_REF
945 && TREE_CODE (dest_base) == MEM_REF)
947 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
948 TREE_OPERAND (dest_base, 0), 0))
949 return false;
950 offset_int off = mem_ref_offset (src_base) + src_offset;
951 if (!wi::fits_shwi_p (off))
952 return false;
953 src_offset = off.to_shwi ();
955 off = mem_ref_offset (dest_base) + dest_offset;
956 if (!wi::fits_shwi_p (off))
957 return false;
958 dest_offset = off.to_shwi ();
959 if (ranges_overlap_p (src_offset, maxsize,
960 dest_offset, maxsize))
961 return false;
963 else
964 return false;
966 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
967 if (!fn)
968 return false;
969 gimple_call_set_fndecl (stmt, fn);
970 gimple_call_set_arg (stmt, 0, dest);
971 gimple_call_set_arg (stmt, 1, src);
972 fold_stmt (gsi);
973 return true;
976 /* If the destination and source do not alias optimize into
977 memcpy as well. */
978 if ((is_gimple_min_invariant (dest)
979 || TREE_CODE (dest) == SSA_NAME)
980 && (is_gimple_min_invariant (src)
981 || TREE_CODE (src) == SSA_NAME))
983 ao_ref destr, srcr;
984 ao_ref_init_from_ptr_and_size (&destr, dest, len);
985 ao_ref_init_from_ptr_and_size (&srcr, src, len);
986 if (!refs_may_alias_p_1 (&destr, &srcr, false))
988 tree fn;
989 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
990 if (!fn)
991 return false;
992 gimple_call_set_fndecl (stmt, fn);
993 gimple_call_set_arg (stmt, 0, dest);
994 gimple_call_set_arg (stmt, 1, src);
995 fold_stmt (gsi);
996 return true;
1000 return false;
1003 if (!tree_fits_shwi_p (len))
1004 return false;
1005 /* FIXME:
1006 This logic lose for arguments like (type *)malloc (sizeof (type)),
1007 since we strip the casts of up to VOID return value from malloc.
1008 Perhaps we ought to inherit type from non-VOID argument here? */
1009 STRIP_NOPS (src);
1010 STRIP_NOPS (dest);
1011 if (!POINTER_TYPE_P (TREE_TYPE (src))
1012 || !POINTER_TYPE_P (TREE_TYPE (dest)))
1013 return false;
1014 /* In the following try to find a type that is most natural to be
1015 used for the memcpy source and destination and that allows
1016 the most optimization when memcpy is turned into a plain assignment
1017 using that type. In theory we could always use a char[len] type
1018 but that only gains us that the destination and source possibly
1019 no longer will have their address taken. */
1020 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1021 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1023 tree tem = TREE_OPERAND (src, 0);
1024 STRIP_NOPS (tem);
1025 if (tem != TREE_OPERAND (src, 0))
1026 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1028 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1030 tree tem = TREE_OPERAND (dest, 0);
1031 STRIP_NOPS (tem);
1032 if (tem != TREE_OPERAND (dest, 0))
1033 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1035 srctype = TREE_TYPE (TREE_TYPE (src));
1036 if (TREE_CODE (srctype) == ARRAY_TYPE
1037 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1039 srctype = TREE_TYPE (srctype);
1040 STRIP_NOPS (src);
1041 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1043 desttype = TREE_TYPE (TREE_TYPE (dest));
1044 if (TREE_CODE (desttype) == ARRAY_TYPE
1045 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1047 desttype = TREE_TYPE (desttype);
1048 STRIP_NOPS (dest);
1049 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1051 if (TREE_ADDRESSABLE (srctype)
1052 || TREE_ADDRESSABLE (desttype))
1053 return false;
1055 /* Make sure we are not copying using a floating-point mode or
1056 a type whose size possibly does not match its precision. */
1057 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1058 || TREE_CODE (desttype) == BOOLEAN_TYPE
1059 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1060 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1061 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1062 || TREE_CODE (srctype) == BOOLEAN_TYPE
1063 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1064 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1065 if (!srctype)
1066 srctype = desttype;
1067 if (!desttype)
1068 desttype = srctype;
1069 if (!srctype)
1070 return false;
1072 src_align = get_pointer_alignment (src);
1073 dest_align = get_pointer_alignment (dest);
1074 if (dest_align < TYPE_ALIGN (desttype)
1075 || src_align < TYPE_ALIGN (srctype))
1076 return false;
1078 destvar = dest;
1079 STRIP_NOPS (destvar);
1080 if (TREE_CODE (destvar) == ADDR_EXPR
1081 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1082 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1083 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1084 else
1085 destvar = NULL_TREE;
1087 srcvar = src;
1088 STRIP_NOPS (srcvar);
1089 if (TREE_CODE (srcvar) == ADDR_EXPR
1090 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1091 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1093 if (!destvar
1094 || src_align >= TYPE_ALIGN (desttype))
1095 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1096 srcvar, off0);
1097 else if (!STRICT_ALIGNMENT)
1099 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1100 src_align);
1101 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1103 else
1104 srcvar = NULL_TREE;
1106 else
1107 srcvar = NULL_TREE;
1109 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1110 return false;
1112 if (srcvar == NULL_TREE)
1114 STRIP_NOPS (src);
1115 if (src_align >= TYPE_ALIGN (desttype))
1116 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1117 else
1119 if (STRICT_ALIGNMENT)
1120 return false;
1121 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1122 src_align);
1123 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1126 else if (destvar == NULL_TREE)
1128 STRIP_NOPS (dest);
1129 if (dest_align >= TYPE_ALIGN (srctype))
1130 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1131 else
1133 if (STRICT_ALIGNMENT)
1134 return false;
1135 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1136 dest_align);
1137 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1141 gimple new_stmt;
1142 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1144 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1145 if (gimple_in_ssa_p (cfun))
1146 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1147 else
1148 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1149 gimple_assign_set_lhs (new_stmt, srcvar);
1150 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1151 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1153 new_stmt = gimple_build_assign (destvar, srcvar);
1154 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1155 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1156 if (gimple_vdef (new_stmt)
1157 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1158 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1159 if (!lhs)
1161 gsi_replace (gsi, new_stmt, true);
1162 return true;
1164 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1167 done:
1168 if (endp == 0 || endp == 3)
1169 len = NULL_TREE;
1170 else if (endp == 2)
1171 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1172 ssize_int (1));
1173 if (endp == 2 || endp == 1)
1174 dest = fold_build_pointer_plus_loc (loc, dest, len);
1176 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1177 GSI_SAME_STMT);
1178 gimple repl = gimple_build_assign (lhs, dest);
1179 gsi_replace (gsi, repl, true);
1180 return true;
1183 /* Fold function call to builtin memset or bzero at *GSI setting the
1184 memory of size LEN to VAL. Return whether a simplification was made. */
1186 static bool
1187 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1189 gimple stmt = gsi_stmt (*gsi);
1190 tree etype;
1191 unsigned HOST_WIDE_INT length, cval;
1193 /* If the LEN parameter is zero, return DEST. */
1194 if (integer_zerop (len))
1196 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1197 return true;
1200 if (! tree_fits_uhwi_p (len))
1201 return false;
1203 if (TREE_CODE (c) != INTEGER_CST)
1204 return false;
1206 tree dest = gimple_call_arg (stmt, 0);
1207 tree var = dest;
1208 if (TREE_CODE (var) != ADDR_EXPR)
1209 return false;
1211 var = TREE_OPERAND (var, 0);
1212 if (TREE_THIS_VOLATILE (var))
1213 return false;
1215 etype = TREE_TYPE (var);
1216 if (TREE_CODE (etype) == ARRAY_TYPE)
1217 etype = TREE_TYPE (etype);
1219 if (!INTEGRAL_TYPE_P (etype)
1220 && !POINTER_TYPE_P (etype))
1221 return NULL_TREE;
1223 if (! var_decl_component_p (var))
1224 return NULL_TREE;
1226 length = tree_to_uhwi (len);
1227 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1228 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1229 return NULL_TREE;
1231 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1232 return NULL_TREE;
1234 if (integer_zerop (c))
1235 cval = 0;
1236 else
1238 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1239 return NULL_TREE;
1241 cval = TREE_INT_CST_LOW (c);
1242 cval &= 0xff;
1243 cval |= cval << 8;
1244 cval |= cval << 16;
1245 cval |= (cval << 31) << 1;
1248 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1249 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1250 gimple_set_vuse (store, gimple_vuse (stmt));
1251 tree vdef = gimple_vdef (stmt);
1252 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1254 gimple_set_vdef (store, gimple_vdef (stmt));
1255 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1257 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1258 if (gimple_call_lhs (stmt))
1260 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1261 gsi_replace (gsi, asgn, true);
1263 else
1265 gimple_stmt_iterator gsi2 = *gsi;
1266 gsi_prev (gsi);
1267 gsi_remove (&gsi2, true);
1270 return true;
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables that is
   allocated lazily on first use and owned by the caller.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* For TYPE == 2 ARG itself is the value and must be a
	     non-negative integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* For maximum queries keep the larger of the two
		 constants.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* For TYPE == 0 every reaching value must agree exactly.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Both arms of a COND_EXPR must yield a usable length.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
	     && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
1400 tree
1401 get_maxval_strlen (tree arg, int type)
1403 bitmap visited = NULL;
1404 tree len = NULL_TREE;
1405 if (!get_maxval_strlen (arg, &len, &visited, type))
1406 len = NULL_TREE;
1407 if (visited)
1408 BITMAP_FREE (visited);
1410 return len;
1414 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1415 If LEN is not NULL, it represents the length of the string to be
1416 copied. Return NULL_TREE if no simplification can be made. */
1418 static bool
1419 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1420 tree dest, tree src)
1422 location_t loc = gimple_location (gsi_stmt (*gsi));
1423 tree fn;
1425 /* If SRC and DEST are the same (and not volatile), return DEST. */
1426 if (operand_equal_p (src, dest, 0))
1428 replace_call_with_value (gsi, dest);
1429 return true;
1432 if (optimize_function_for_size_p (cfun))
1433 return false;
1435 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1436 if (!fn)
1437 return false;
1439 tree len = get_maxval_strlen (src, 0);
1440 if (!len)
1441 return false;
1443 len = fold_convert_loc (loc, size_type_node, len);
1444 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1445 len = force_gimple_operand_gsi (gsi, len, true,
1446 NULL_TREE, true, GSI_SAME_STMT);
1447 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1448 replace_call_with_call_and_fold (gsi, repl);
1449 return true;
1452 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1453 If SLEN is not NULL, it represents the length of the source string.
1454 Return NULL_TREE if no simplification can be made. */
1456 static bool
1457 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1458 tree dest, tree src, tree len)
1460 location_t loc = gimple_location (gsi_stmt (*gsi));
1461 tree fn;
1463 /* If the LEN parameter is zero, return DEST. */
1464 if (integer_zerop (len))
1466 replace_call_with_value (gsi, dest);
1467 return true;
1470 /* We can't compare slen with len as constants below if len is not a
1471 constant. */
1472 if (TREE_CODE (len) != INTEGER_CST)
1473 return false;
1475 /* Now, we must be passed a constant src ptr parameter. */
1476 tree slen = get_maxval_strlen (src, 0);
1477 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1478 return false;
1480 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1482 /* We do not support simplification of this case, though we do
1483 support it when expanding trees into RTL. */
1484 /* FIXME: generate a call to __builtin_memset. */
1485 if (tree_int_cst_lt (slen, len))
1486 return false;
1488 /* OK transform into builtin memcpy. */
1489 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1490 if (!fn)
1491 return false;
1493 len = fold_convert_loc (loc, size_type_node, len);
1494 len = force_gimple_operand_gsi (gsi, len, true,
1495 NULL_TREE, true, GSI_SAME_STMT);
1496 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1497 replace_call_with_call_and_fold (gsi, repl);
1498 return true;
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return true if the call was simplified, false otherwise.  When SRC
   is known to be the empty string the call is replaced by DST; when
   the length of SRC is a known constant the call is split into
   strlen (DST) followed by memcpy (DST + strlen (DST), SRC, len + 1).  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy split is a speed optimization only.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy len + 1 bytes so the terminating NUL comes along.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns its first argument, so forward DST to the
	 call's LHS.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1596 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1597 are the arguments to the call. */
1599 static bool
1600 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1602 gimple stmt = gsi_stmt (*gsi);
1603 tree dest = gimple_call_arg (stmt, 0);
1604 tree src = gimple_call_arg (stmt, 1);
1605 tree size = gimple_call_arg (stmt, 2);
1606 tree fn;
1607 const char *p;
1610 p = c_getstr (src);
1611 /* If the SRC parameter is "", return DEST. */
1612 if (p && *p == '\0')
1614 replace_call_with_value (gsi, dest);
1615 return true;
1618 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1619 return false;
1621 /* If __builtin_strcat_chk is used, assume strcat is available. */
1622 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1623 if (!fn)
1624 return false;
1626 gimple repl = gimple_build_call (fn, 2, dest, src);
1627 replace_call_with_call_and_fold (gsi, repl);
1628 return true;
1631 /* Simplify a call to the strncat builtin. */
1633 static bool
1634 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1636 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1637 tree dst = gimple_call_arg (stmt, 0);
1638 tree src = gimple_call_arg (stmt, 1);
1639 tree len = gimple_call_arg (stmt, 2);
1641 const char *p = c_getstr (src);
1643 /* If the requested length is zero, or the src parameter string
1644 length is zero, return the dst parameter. */
1645 if (integer_zerop (len) || (p && *p == '\0'))
1647 replace_call_with_value (gsi, dst);
1648 return true;
1651 /* If the requested len is greater than or equal to the string
1652 length, call strcat. */
1653 if (TREE_CODE (len) == INTEGER_CST && p
1654 && compare_tree_int (len, strlen (p)) >= 0)
1656 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1658 /* If the replacement _DECL isn't initialized, don't do the
1659 transformation. */
1660 if (!fn)
1661 return false;
1663 gcall *repl = gimple_build_call (fn, 2, dst, src);
1664 replace_call_with_call_and_fold (gsi, repl);
1665 return true;
1668 return false;
1671 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1672 LEN, and SIZE. */
1674 static bool
1675 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1677 gimple stmt = gsi_stmt (*gsi);
1678 tree dest = gimple_call_arg (stmt, 0);
1679 tree src = gimple_call_arg (stmt, 1);
1680 tree len = gimple_call_arg (stmt, 2);
1681 tree size = gimple_call_arg (stmt, 3);
1682 tree fn;
1683 const char *p;
1685 p = c_getstr (src);
1686 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1687 if ((p && *p == '\0')
1688 || integer_zerop (len))
1690 replace_call_with_value (gsi, dest);
1691 return true;
1694 if (! tree_fits_uhwi_p (size))
1695 return false;
1697 if (! integer_all_onesp (size))
1699 tree src_len = c_strlen (src, 1);
1700 if (src_len
1701 && tree_fits_uhwi_p (src_len)
1702 && tree_fits_uhwi_p (len)
1703 && ! tree_int_cst_lt (len, src_len))
1705 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1706 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1707 if (!fn)
1708 return false;
1710 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1711 replace_call_with_call_and_fold (gsi, repl);
1712 return true;
1714 return false;
1717 /* If __builtin_strncat_chk is used, assume strncat is available. */
1718 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1719 if (!fn)
1720 return false;
1722 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1723 replace_call_with_call_and_fold (gsi, repl);
1724 return true;
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return true if the call was simplified, false
   otherwise.  The transformation only fires when the call's return
   value is unused.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					     (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* If the single character isn't known at compile time, fall
	 through and emit fwrite instead.  */
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin at *GSI.
   DEST, SRC, LEN, and SIZE are the arguments to the call and FCODE is
   the BUILT_IN_* code of the builtin.  Return true if the call was
   simplified, false otherwise.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* Maximum value LEN can take; only consulted when SIZE is a real
     object size below.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* The copy must provably fit into the object.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Return true if the
   call was simplified, false otherwise.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* Maximum string length of SRC; only consulted when SIZE is a real
     object size below.  */
  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* The string (including its NUL) must provably fit, i.e.
	 strlen (SRC) < SIZE.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1995 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1996 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1997 length passed as third argument. IGNORE is true if return value can be
1998 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2000 static bool
2001 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2002 tree dest, tree src,
2003 tree len, tree size,
2004 enum built_in_function fcode)
2006 gimple stmt = gsi_stmt (*gsi);
2007 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2008 tree fn;
2010 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2012 /* If return value of __stpncpy_chk is ignored,
2013 optimize into __strncpy_chk. */
2014 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2015 if (fn)
2017 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
2018 replace_call_with_call_and_fold (gsi, repl);
2019 return true;
2023 if (! tree_fits_uhwi_p (size))
2024 return false;
2026 tree maxlen = get_maxval_strlen (len, 2);
2027 if (! integer_all_onesp (size))
2029 if (! tree_fits_uhwi_p (len))
2031 /* If LEN is not constant, try MAXLEN too.
2032 For MAXLEN only allow optimizing into non-_ocs function
2033 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2034 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2035 return false;
2037 else
2038 maxlen = len;
2040 if (tree_int_cst_lt (size, maxlen))
2041 return false;
2044 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2045 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2046 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2047 if (!fn)
2048 return false;
2050 gimple repl = gimple_build_call (fn, 3, dest, src, len);
2051 replace_call_with_call_and_fold (gsi, repl);
2052 return true;
/* Fold function call to builtin stpcpy at *GSI (the DEST and SRC
   arguments are read from the call statement itself).  Return true if
   a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      /* Note: this FN intentionally shadows the outer one, which is
	 never used on this path.  */
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Build LEN + 1 (to copy the terminating NUL as well).  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  /* Emit the memcpy, moving the call's virtual operands onto it.  */
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, true);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2119 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2120 NULL_TREE if a normal call should be emitted rather than expanding
2121 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2122 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2123 passed as second argument. */
2125 static bool
2126 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2127 enum built_in_function fcode)
2129 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2130 tree dest, size, len, fn, fmt, flag;
2131 const char *fmt_str;
2133 /* Verify the required arguments in the original call. */
2134 if (gimple_call_num_args (stmt) < 5)
2135 return false;
2137 dest = gimple_call_arg (stmt, 0);
2138 len = gimple_call_arg (stmt, 1);
2139 flag = gimple_call_arg (stmt, 2);
2140 size = gimple_call_arg (stmt, 3);
2141 fmt = gimple_call_arg (stmt, 4);
2143 if (! tree_fits_uhwi_p (size))
2144 return false;
2146 if (! integer_all_onesp (size))
2148 tree maxlen = get_maxval_strlen (len, 2);
2149 if (! tree_fits_uhwi_p (len))
2151 /* If LEN is not constant, try MAXLEN too.
2152 For MAXLEN only allow optimizing into non-_ocs function
2153 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2154 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2155 return false;
2157 else
2158 maxlen = len;
2160 if (tree_int_cst_lt (size, maxlen))
2161 return false;
2164 if (!init_target_chars ())
2165 return false;
2167 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2168 or if format doesn't contain % chars or is "%s". */
2169 if (! integer_zerop (flag))
2171 fmt_str = c_getstr (fmt);
2172 if (fmt_str == NULL)
2173 return false;
2174 if (strchr (fmt_str, target_percent) != NULL
2175 && strcmp (fmt_str, target_percent_s))
2176 return false;
2179 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2180 available. */
2181 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2182 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2183 if (!fn)
2184 return false;
2186 /* Replace the called function and the first 5 argument by 3 retaining
2187 trailing varargs. */
2188 gimple_call_set_fndecl (stmt, fn);
2189 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2190 gimple_call_set_arg (stmt, 0, dest);
2191 gimple_call_set_arg (stmt, 1, len);
2192 gimple_call_set_arg (stmt, 2, fmt);
2193 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2194 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2195 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2196 fold_stmt (gsi);
2197 return true;
2200 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2201 Return NULL_TREE if a normal call should be emitted rather than
2202 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2203 or BUILT_IN_VSPRINTF_CHK. */
2205 static bool
2206 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2207 enum built_in_function fcode)
2209 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2210 tree dest, size, len, fn, fmt, flag;
2211 const char *fmt_str;
2212 unsigned nargs = gimple_call_num_args (stmt);
2214 /* Verify the required arguments in the original call. */
2215 if (nargs < 4)
2216 return false;
2217 dest = gimple_call_arg (stmt, 0);
2218 flag = gimple_call_arg (stmt, 1);
2219 size = gimple_call_arg (stmt, 2);
2220 fmt = gimple_call_arg (stmt, 3);
2222 if (! tree_fits_uhwi_p (size))
2223 return false;
2225 len = NULL_TREE;
2227 if (!init_target_chars ())
2228 return false;
2230 /* Check whether the format is a literal string constant. */
2231 fmt_str = c_getstr (fmt);
2232 if (fmt_str != NULL)
2234 /* If the format doesn't contain % args or %%, we know the size. */
2235 if (strchr (fmt_str, target_percent) == 0)
2237 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2238 len = build_int_cstu (size_type_node, strlen (fmt_str));
2240 /* If the format is "%s" and first ... argument is a string literal,
2241 we know the size too. */
2242 else if (fcode == BUILT_IN_SPRINTF_CHK
2243 && strcmp (fmt_str, target_percent_s) == 0)
2245 tree arg;
2247 if (nargs == 5)
2249 arg = gimple_call_arg (stmt, 4);
2250 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2252 len = c_strlen (arg, 1);
2253 if (! len || ! tree_fits_uhwi_p (len))
2254 len = NULL_TREE;
2260 if (! integer_all_onesp (size))
2262 if (! len || ! tree_int_cst_lt (len, size))
2263 return false;
2266 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2267 or if format doesn't contain % chars or is "%s". */
2268 if (! integer_zerop (flag))
2270 if (fmt_str == NULL)
2271 return false;
2272 if (strchr (fmt_str, target_percent) != NULL
2273 && strcmp (fmt_str, target_percent_s))
2274 return false;
2277 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2278 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2279 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2280 if (!fn)
2281 return false;
2283 /* Replace the called function and the first 4 argument by 2 retaining
2284 trailing varargs. */
2285 gimple_call_set_fndecl (stmt, fn);
2286 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2287 gimple_call_set_arg (stmt, 0, dest);
2288 gimple_call_set_arg (stmt, 1, fmt);
2289 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2290 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2291 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2292 fold_stmt (gsi);
2293 return true;
2296 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2297 ORIG may be null if this is a 2-argument call. We don't attempt to
2298 simplify calls with more than 3 arguments.
2300 Return NULL_TREE if no simplification was possible, otherwise return the
2301 simplified form of the call as a tree. If IGNORED is true, it means that
2302 the caller does not use the returned value of the function. */
2304 static bool
2305 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2307 gimple stmt = gsi_stmt (*gsi);
2308 tree dest = gimple_call_arg (stmt, 0);
2309 tree fmt = gimple_call_arg (stmt, 1);
2310 tree orig = NULL_TREE;
2311 const char *fmt_str = NULL;
2313 /* Verify the required arguments in the original call. We deal with two
2314 types of sprintf() calls: 'sprintf (str, fmt)' and
2315 'sprintf (dest, "%s", orig)'. */
2316 if (gimple_call_num_args (stmt) > 3)
2317 return false;
2319 if (gimple_call_num_args (stmt) == 3)
2320 orig = gimple_call_arg (stmt, 2);
2322 /* Check whether the format is a literal string constant. */
2323 fmt_str = c_getstr (fmt);
2324 if (fmt_str == NULL)
2325 return false;
2327 if (!init_target_chars ())
2328 return false;
2330 /* If the format doesn't contain % args or %%, use strcpy. */
2331 if (strchr (fmt_str, target_percent) == NULL)
2333 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2335 if (!fn)
2336 return false;
2338 /* Don't optimize sprintf (buf, "abc", ptr++). */
2339 if (orig)
2340 return false;
2342 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2343 'format' is known to contain no % formats. */
2344 gimple_seq stmts = NULL;
2345 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2346 gimple_seq_add_stmt_without_update (&stmts, repl);
2347 if (gimple_call_lhs (stmt))
2349 repl = gimple_build_assign (gimple_call_lhs (stmt),
2350 build_int_cst (integer_type_node,
2351 strlen (fmt_str)));
2352 gimple_seq_add_stmt_without_update (&stmts, repl);
2353 gsi_replace_with_seq_vops (gsi, stmts);
2354 /* gsi now points at the assignment to the lhs, get a
2355 stmt iterator to the memcpy call.
2356 ??? We can't use gsi_for_stmt as that doesn't work when the
2357 CFG isn't built yet. */
2358 gimple_stmt_iterator gsi2 = *gsi;
2359 gsi_prev (&gsi2);
2360 fold_stmt (&gsi2);
2362 else
2364 gsi_replace_with_seq_vops (gsi, stmts);
2365 fold_stmt (gsi);
2367 return true;
2370 /* If the format is "%s", use strcpy if the result isn't used. */
2371 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2373 tree fn;
2374 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2376 if (!fn)
2377 return false;
2379 /* Don't crash on sprintf (str1, "%s"). */
2380 if (!orig)
2381 return false;
2383 tree orig_len = NULL_TREE;
2384 if (gimple_call_lhs (stmt))
2386 orig_len = get_maxval_strlen (orig, 0);
2387 if (!orig_len)
2388 return false;
2391 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2392 gimple_seq stmts = NULL;
2393 gimple repl = gimple_build_call (fn, 2, dest, orig);
2394 gimple_seq_add_stmt_without_update (&stmts, repl);
2395 if (gimple_call_lhs (stmt))
2397 if (!useless_type_conversion_p (integer_type_node,
2398 TREE_TYPE (orig_len)))
2399 orig_len = fold_convert (integer_type_node, orig_len);
2400 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2401 gimple_seq_add_stmt_without_update (&stmts, repl);
2402 gsi_replace_with_seq_vops (gsi, stmts);
2403 /* gsi now points at the assignment to the lhs, get a
2404 stmt iterator to the memcpy call.
2405 ??? We can't use gsi_for_stmt as that doesn't work when the
2406 CFG isn't built yet. */
2407 gimple_stmt_iterator gsi2 = *gsi;
2408 gsi_prev (&gsi2);
2409 fold_stmt (&gsi2);
2411 else
2413 gsi_replace_with_seq_vops (gsi, stmts);
2414 fold_stmt (gsi);
2416 return true;
2418 return false;
2421 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2422 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2423 attempt to simplify calls with more than 4 arguments.
2425 Return NULL_TREE if no simplification was possible, otherwise return the
2426 simplified form of the call as a tree. If IGNORED is true, it means that
2427 the caller does not use the returned value of the function. */
2429 static bool
2430 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2432 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2433 tree dest = gimple_call_arg (stmt, 0);
2434 tree destsize = gimple_call_arg (stmt, 1);
2435 tree fmt = gimple_call_arg (stmt, 2);
2436 tree orig = NULL_TREE;
2437 const char *fmt_str = NULL;
2439 if (gimple_call_num_args (stmt) > 4)
2440 return false;
2442 if (gimple_call_num_args (stmt) == 4)
2443 orig = gimple_call_arg (stmt, 3);
2445 if (!tree_fits_uhwi_p (destsize))
2446 return false;
2447 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2449 /* Check whether the format is a literal string constant. */
2450 fmt_str = c_getstr (fmt);
2451 if (fmt_str == NULL)
2452 return false;
2454 if (!init_target_chars ())
2455 return false;
2457 /* If the format doesn't contain % args or %%, use strcpy. */
2458 if (strchr (fmt_str, target_percent) == NULL)
2460 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2461 if (!fn)
2462 return false;
2464 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2465 if (orig)
2466 return false;
2468 /* We could expand this as
2469 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2470 or to
2471 memcpy (str, fmt_with_nul_at_cstm1, cst);
2472 but in the former case that might increase code size
2473 and in the latter case grow .rodata section too much.
2474 So punt for now. */
2475 size_t len = strlen (fmt_str);
2476 if (len >= destlen)
2477 return false;
2479 gimple_seq stmts = NULL;
2480 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2481 gimple_seq_add_stmt_without_update (&stmts, repl);
2482 if (gimple_call_lhs (stmt))
2484 repl = gimple_build_assign (gimple_call_lhs (stmt),
2485 build_int_cst (integer_type_node, len));
2486 gimple_seq_add_stmt_without_update (&stmts, repl);
2487 gsi_replace_with_seq_vops (gsi, stmts);
2488 /* gsi now points at the assignment to the lhs, get a
2489 stmt iterator to the memcpy call.
2490 ??? We can't use gsi_for_stmt as that doesn't work when the
2491 CFG isn't built yet. */
2492 gimple_stmt_iterator gsi2 = *gsi;
2493 gsi_prev (&gsi2);
2494 fold_stmt (&gsi2);
2496 else
2498 gsi_replace_with_seq_vops (gsi, stmts);
2499 fold_stmt (gsi);
2501 return true;
2504 /* If the format is "%s", use strcpy if the result isn't used. */
2505 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2507 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2508 if (!fn)
2509 return false;
2511 /* Don't crash on snprintf (str1, cst, "%s"). */
2512 if (!orig)
2513 return false;
2515 tree orig_len = get_maxval_strlen (orig, 0);
2516 if (!orig_len)
2517 return false;
2519 /* We could expand this as
2520 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2521 or to
2522 memcpy (str1, str2_with_nul_at_cstm1, cst);
2523 but in the former case that might increase code size
2524 and in the latter case grow .rodata section too much.
2525 So punt for now. */
2526 if (compare_tree_int (orig_len, destlen) >= 0)
2527 return false;
2529 /* Convert snprintf (str1, cst, "%s", str2) into
2530 strcpy (str1, str2) if strlen (str2) < cst. */
2531 gimple_seq stmts = NULL;
2532 gimple repl = gimple_build_call (fn, 2, dest, orig);
2533 gimple_seq_add_stmt_without_update (&stmts, repl);
2534 if (gimple_call_lhs (stmt))
2536 if (!useless_type_conversion_p (integer_type_node,
2537 TREE_TYPE (orig_len)))
2538 orig_len = fold_convert (integer_type_node, orig_len);
2539 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2540 gimple_seq_add_stmt_without_update (&stmts, repl);
2541 gsi_replace_with_seq_vops (gsi, stmts);
2542 /* gsi now points at the assignment to the lhs, get a
2543 stmt iterator to the memcpy call.
2544 ??? We can't use gsi_for_stmt as that doesn't work when the
2545 CFG isn't built yet. */
2546 gimple_stmt_iterator gsi2 = *gsi;
2547 gsi_prev (&gsi2);
2548 fold_stmt (&gsi2);
2550 else
2552 gsi_replace_with_seq_vops (gsi, stmts);
2553 fold_stmt (gsi);
2555 return true;
2557 return false;
2560 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2561 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2562 more than 3 arguments, and ARG may be null in the 2-argument case.
2564 Return NULL_TREE if no simplification was possible, otherwise return the
2565 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2566 code of the function to be simplified. */
2568 static bool
2569 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2570 tree fp, tree fmt, tree arg,
2571 enum built_in_function fcode)
2573 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2574 tree fn_fputc, fn_fputs;
2575 const char *fmt_str = NULL;
2577 /* If the return value is used, don't do the transformation. */
2578 if (gimple_call_lhs (stmt) != NULL_TREE)
2579 return false;
2581 /* Check whether the format is a literal string constant. */
2582 fmt_str = c_getstr (fmt);
2583 if (fmt_str == NULL)
2584 return false;
2586 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2588 /* If we're using an unlocked function, assume the other
2589 unlocked functions exist explicitly. */
2590 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2591 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2593 else
2595 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2596 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2599 if (!init_target_chars ())
2600 return false;
2602 /* If the format doesn't contain % args or %%, use strcpy. */
2603 if (strchr (fmt_str, target_percent) == NULL)
2605 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2606 && arg)
2607 return false;
2609 /* If the format specifier was "", fprintf does nothing. */
2610 if (fmt_str[0] == '\0')
2612 replace_call_with_value (gsi, NULL_TREE);
2613 return true;
2616 /* When "string" doesn't contain %, replace all cases of
2617 fprintf (fp, string) with fputs (string, fp). The fputs
2618 builtin will take care of special cases like length == 1. */
2619 if (fn_fputs)
2621 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2622 replace_call_with_call_and_fold (gsi, repl);
2623 return true;
2627 /* The other optimizations can be done only on the non-va_list variants. */
2628 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2629 return false;
2631 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2632 else if (strcmp (fmt_str, target_percent_s) == 0)
2634 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2635 return false;
2636 if (fn_fputs)
2638 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2639 replace_call_with_call_and_fold (gsi, repl);
2640 return true;
2644 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2645 else if (strcmp (fmt_str, target_percent_c) == 0)
2647 if (!arg
2648 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2649 return false;
2650 if (fn_fputc)
2652 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2653 replace_call_with_call_and_fold (gsi, repl);
2654 return true;
2658 return false;
2661 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2662 FMT and ARG are the arguments to the call; we don't fold cases with
2663 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2665 Return NULL_TREE if no simplification was possible, otherwise return the
2666 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2667 code of the function to be simplified. */
2669 static bool
2670 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2671 tree arg, enum built_in_function fcode)
2673 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2674 tree fn_putchar, fn_puts, newarg;
2675 const char *fmt_str = NULL;
2677 /* If the return value is used, don't do the transformation. */
2678 if (gimple_call_lhs (stmt) != NULL_TREE)
2679 return false;
2681 /* Check whether the format is a literal string constant. */
2682 fmt_str = c_getstr (fmt);
2683 if (fmt_str == NULL)
2684 return false;
2686 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2688 /* If we're using an unlocked function, assume the other
2689 unlocked functions exist explicitly. */
2690 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2691 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2693 else
2695 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2696 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2699 if (!init_target_chars ())
2700 return false;
2702 if (strcmp (fmt_str, target_percent_s) == 0
2703 || strchr (fmt_str, target_percent) == NULL)
2705 const char *str;
2707 if (strcmp (fmt_str, target_percent_s) == 0)
2709 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2710 return false;
2712 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2713 return false;
2715 str = c_getstr (arg);
2716 if (str == NULL)
2717 return false;
2719 else
2721 /* The format specifier doesn't contain any '%' characters. */
2722 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2723 && arg)
2724 return false;
2725 str = fmt_str;
2728 /* If the string was "", printf does nothing. */
2729 if (str[0] == '\0')
2731 replace_call_with_value (gsi, NULL_TREE);
2732 return true;
2735 /* If the string has length of 1, call putchar. */
2736 if (str[1] == '\0')
2738 /* Given printf("c"), (where c is any one character,)
2739 convert "c"[0] to an int and pass that to the replacement
2740 function. */
2741 newarg = build_int_cst (integer_type_node, str[0]);
2742 if (fn_putchar)
2744 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2745 replace_call_with_call_and_fold (gsi, repl);
2746 return true;
2749 else
2751 /* If the string was "string\n", call puts("string"). */
2752 size_t len = strlen (str);
2753 if ((unsigned char)str[len - 1] == target_newline
2754 && (size_t) (int) len == len
2755 && (int) len > 0)
2757 char *newstr;
2758 tree offset_node, string_cst;
2760 /* Create a NUL-terminated string that's one char shorter
2761 than the original, stripping off the trailing '\n'. */
2762 newarg = build_string_literal (len, str);
2763 string_cst = string_constant (newarg, &offset_node);
2764 gcc_checking_assert (string_cst
2765 && (TREE_STRING_LENGTH (string_cst)
2766 == (int) len)
2767 && integer_zerop (offset_node)
2768 && (unsigned char)
2769 TREE_STRING_POINTER (string_cst)[len - 1]
2770 == target_newline);
2771 /* build_string_literal creates a new STRING_CST,
2772 modify it in place to avoid double copying. */
2773 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2774 newstr[len - 1] = '\0';
2775 if (fn_puts)
2777 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2778 replace_call_with_call_and_fold (gsi, repl);
2779 return true;
2782 else
2783 /* We'd like to arrange to call fputs(string,stdout) here,
2784 but we need stdout and don't have a way to get it yet. */
2785 return false;
2789 /* The other optimizations can be done only on the non-va_list variants. */
2790 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2791 return false;
2793 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2794 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2796 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2797 return false;
2798 if (fn_puts)
2800 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2801 replace_call_with_call_and_fold (gsi, repl);
2802 return true;
2806 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2807 else if (strcmp (fmt_str, target_percent_c) == 0)
2809 if (!arg || ! useless_type_conversion_p (integer_type_node,
2810 TREE_TYPE (arg)))
2811 return false;
2812 if (fn_putchar)
2814 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2815 replace_call_with_call_and_fold (gsi, repl);
2816 return true;
2820 return false;
2825 /* Fold a call to __builtin_strlen with known length LEN. */
2827 static bool
2828 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2830 gimple stmt = gsi_stmt (*gsi);
2831 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2832 if (!len)
2833 return false;
2834 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2835 replace_call_with_value (gsi, len);
2836 return true;
2840 /* Fold the non-target builtin at *GSI and return whether any simplification
2841 was made. */
2843 static bool
2844 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2846 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2847 tree callee = gimple_call_fndecl (stmt);
2849 /* Give up for always_inline inline builtins until they are
2850 inlined. */
2851 if (avoid_folding_inline_builtin (callee))
2852 return false;
2854 unsigned n = gimple_call_num_args (stmt);
2855 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2856 switch (fcode)
2858 case BUILT_IN_BZERO:
2859 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2860 gimple_call_arg (stmt, 1));
2861 case BUILT_IN_MEMSET:
2862 return gimple_fold_builtin_memset (gsi,
2863 gimple_call_arg (stmt, 1),
2864 gimple_call_arg (stmt, 2));
2865 case BUILT_IN_BCOPY:
2866 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2867 gimple_call_arg (stmt, 0), 3);
2868 case BUILT_IN_MEMCPY:
2869 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2870 gimple_call_arg (stmt, 1), 0);
2871 case BUILT_IN_MEMPCPY:
2872 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2873 gimple_call_arg (stmt, 1), 1);
2874 case BUILT_IN_MEMMOVE:
2875 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2876 gimple_call_arg (stmt, 1), 3);
2877 case BUILT_IN_SPRINTF_CHK:
2878 case BUILT_IN_VSPRINTF_CHK:
2879 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2880 case BUILT_IN_STRCAT_CHK:
2881 return gimple_fold_builtin_strcat_chk (gsi);
2882 case BUILT_IN_STRNCAT_CHK:
2883 return gimple_fold_builtin_strncat_chk (gsi);
2884 case BUILT_IN_STRLEN:
2885 return gimple_fold_builtin_strlen (gsi);
2886 case BUILT_IN_STRCPY:
2887 return gimple_fold_builtin_strcpy (gsi,
2888 gimple_call_arg (stmt, 0),
2889 gimple_call_arg (stmt, 1));
2890 case BUILT_IN_STRNCPY:
2891 return gimple_fold_builtin_strncpy (gsi,
2892 gimple_call_arg (stmt, 0),
2893 gimple_call_arg (stmt, 1),
2894 gimple_call_arg (stmt, 2));
2895 case BUILT_IN_STRCAT:
2896 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2897 gimple_call_arg (stmt, 1));
2898 case BUILT_IN_STRNCAT:
2899 return gimple_fold_builtin_strncat (gsi);
2900 case BUILT_IN_FPUTS:
2901 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2902 gimple_call_arg (stmt, 1), false);
2903 case BUILT_IN_FPUTS_UNLOCKED:
2904 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2905 gimple_call_arg (stmt, 1), true);
2906 case BUILT_IN_MEMCPY_CHK:
2907 case BUILT_IN_MEMPCPY_CHK:
2908 case BUILT_IN_MEMMOVE_CHK:
2909 case BUILT_IN_MEMSET_CHK:
2910 return gimple_fold_builtin_memory_chk (gsi,
2911 gimple_call_arg (stmt, 0),
2912 gimple_call_arg (stmt, 1),
2913 gimple_call_arg (stmt, 2),
2914 gimple_call_arg (stmt, 3),
2915 fcode);
2916 case BUILT_IN_STPCPY:
2917 return gimple_fold_builtin_stpcpy (gsi);
2918 case BUILT_IN_STRCPY_CHK:
2919 case BUILT_IN_STPCPY_CHK:
2920 return gimple_fold_builtin_stxcpy_chk (gsi,
2921 gimple_call_arg (stmt, 0),
2922 gimple_call_arg (stmt, 1),
2923 gimple_call_arg (stmt, 2),
2924 fcode);
2925 case BUILT_IN_STRNCPY_CHK:
2926 case BUILT_IN_STPNCPY_CHK:
2927 return gimple_fold_builtin_stxncpy_chk (gsi,
2928 gimple_call_arg (stmt, 0),
2929 gimple_call_arg (stmt, 1),
2930 gimple_call_arg (stmt, 2),
2931 gimple_call_arg (stmt, 3),
2932 fcode);
2933 case BUILT_IN_SNPRINTF_CHK:
2934 case BUILT_IN_VSNPRINTF_CHK:
2935 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2936 case BUILT_IN_SNPRINTF:
2937 return gimple_fold_builtin_snprintf (gsi);
2938 case BUILT_IN_SPRINTF:
2939 return gimple_fold_builtin_sprintf (gsi);
2940 case BUILT_IN_FPRINTF:
2941 case BUILT_IN_FPRINTF_UNLOCKED:
2942 case BUILT_IN_VFPRINTF:
2943 if (n == 2 || n == 3)
2944 return gimple_fold_builtin_fprintf (gsi,
2945 gimple_call_arg (stmt, 0),
2946 gimple_call_arg (stmt, 1),
2947 n == 3
2948 ? gimple_call_arg (stmt, 2)
2949 : NULL_TREE,
2950 fcode);
2951 break;
2952 case BUILT_IN_FPRINTF_CHK:
2953 case BUILT_IN_VFPRINTF_CHK:
2954 if (n == 3 || n == 4)
2955 return gimple_fold_builtin_fprintf (gsi,
2956 gimple_call_arg (stmt, 0),
2957 gimple_call_arg (stmt, 2),
2958 n == 4
2959 ? gimple_call_arg (stmt, 3)
2960 : NULL_TREE,
2961 fcode);
2962 break;
2963 case BUILT_IN_PRINTF:
2964 case BUILT_IN_PRINTF_UNLOCKED:
2965 case BUILT_IN_VPRINTF:
2966 if (n == 1 || n == 2)
2967 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2968 n == 2
2969 ? gimple_call_arg (stmt, 1)
2970 : NULL_TREE, fcode);
2971 break;
2972 case BUILT_IN_PRINTF_CHK:
2973 case BUILT_IN_VPRINTF_CHK:
2974 if (n == 2 || n == 3)
2975 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2976 n == 3
2977 ? gimple_call_arg (stmt, 2)
2978 : NULL_TREE, fcode);
2979 default:;
2982 /* Try the generic builtin folder. */
2983 bool ignore = (gimple_call_lhs (stmt) == NULL);
2984 tree result = fold_call_stmt (stmt, ignore);
2985 if (result)
2987 if (ignore)
2988 STRIP_NOPS (result);
2989 else
2990 result = fold_convert (gimple_call_return_type (stmt), result);
2991 if (!update_call_from_tree (gsi, result))
2992 gimplify_and_update_call_from_tree (gsi, result);
2993 return true;
2996 return false;
2999 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3000 doesn't fit into TYPE. The test for overflow should be regardless of
3001 -fwrapv, and even for unsigned types. */
3003 bool
3004 arith_overflowed_p (enum tree_code code, const_tree type,
3005 const_tree arg0, const_tree arg1)
3007 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3008 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3009 widest2_int_cst;
3010 widest2_int warg0 = widest2_int_cst (arg0);
3011 widest2_int warg1 = widest2_int_cst (arg1);
3012 widest2_int wres;
3013 switch (code)
3015 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3016 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3017 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3018 default: gcc_unreachable ();
3020 signop sign = TYPE_SIGN (type);
3021 if (sign == UNSIGNED && wi::neg_p (wres))
3022 return true;
3023 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3026 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3027 The statement may be replaced by another statement, e.g., if the call
3028 simplifies to a constant value. Return true if any changes were made.
3029 It is assumed that the operands have been previously folded. */
3031 static bool
3032 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3034 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3035 tree callee;
3036 bool changed = false;
3037 unsigned i;
3039 /* Fold *& in call arguments. */
3040 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3041 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3043 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3044 if (tmp)
3046 gimple_call_set_arg (stmt, i, tmp);
3047 changed = true;
3051 /* Check for virtual calls that became direct calls. */
3052 callee = gimple_call_fn (stmt);
3053 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3055 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3057 if (dump_file && virtual_method_call_p (callee)
3058 && !possible_polymorphic_call_target_p
3059 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3060 (OBJ_TYPE_REF_EXPR (callee)))))
3062 fprintf (dump_file,
3063 "Type inheritance inconsistent devirtualization of ");
3064 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3065 fprintf (dump_file, " to ");
3066 print_generic_expr (dump_file, callee, TDF_SLIM);
3067 fprintf (dump_file, "\n");
3070 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3071 changed = true;
3073 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3075 bool final;
3076 vec <cgraph_node *>targets
3077 = possible_polymorphic_call_targets (callee, stmt, &final);
3078 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3080 tree lhs = gimple_call_lhs (stmt);
3081 if (dump_enabled_p ())
3083 location_t loc = gimple_location_safe (stmt);
3084 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3085 "folding virtual function call to %s\n",
3086 targets.length () == 1
3087 ? targets[0]->name ()
3088 : "__builtin_unreachable");
3090 if (targets.length () == 1)
3092 gimple_call_set_fndecl (stmt, targets[0]->decl);
3093 changed = true;
3094 /* If the call becomes noreturn, remove the lhs. */
3095 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3097 if (TREE_CODE (lhs) == SSA_NAME)
3099 tree var = create_tmp_var (TREE_TYPE (lhs));
3100 tree def = get_or_create_ssa_default_def (cfun, var);
3101 gimple new_stmt = gimple_build_assign (lhs, def);
3102 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3104 gimple_call_set_lhs (stmt, NULL_TREE);
3107 else
3109 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3110 gimple new_stmt = gimple_build_call (fndecl, 0);
3111 gimple_set_location (new_stmt, gimple_location (stmt));
3112 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3114 tree var = create_tmp_var (TREE_TYPE (lhs));
3115 tree def = get_or_create_ssa_default_def (cfun, var);
3117 /* To satisfy condition for
3118 cgraph_update_edges_for_call_stmt_node,
3119 we need to preserve GIMPLE_CALL statement
3120 at position of GSI iterator. */
3121 update_call_from_tree (gsi, def);
3122 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3124 else
3126 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3127 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3128 gsi_replace (gsi, new_stmt, false);
3130 return true;
3136 /* Check for indirect calls that became direct calls, and then
3137 no longer require a static chain. */
3138 if (gimple_call_chain (stmt))
3140 tree fn = gimple_call_fndecl (stmt);
3141 if (fn && !DECL_STATIC_CHAIN (fn))
3143 gimple_call_set_chain (stmt, NULL);
3144 changed = true;
3146 else
3148 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3149 if (tmp)
3151 gimple_call_set_chain (stmt, tmp);
3152 changed = true;
3157 if (inplace)
3158 return changed;
3160 /* Check for builtins that CCP can handle using information not
3161 available in the generic fold routines. */
3162 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3164 if (gimple_fold_builtin (gsi))
3165 changed = true;
3167 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3169 changed |= targetm.gimple_fold_builtin (gsi);
3171 else if (gimple_call_internal_p (stmt))
3173 enum tree_code subcode = ERROR_MARK;
3174 tree result = NULL_TREE;
3175 bool cplx_result = false;
3176 tree overflow = NULL_TREE;
3177 switch (gimple_call_internal_fn (stmt))
3179 case IFN_BUILTIN_EXPECT:
3180 result = fold_builtin_expect (gimple_location (stmt),
3181 gimple_call_arg (stmt, 0),
3182 gimple_call_arg (stmt, 1),
3183 gimple_call_arg (stmt, 2));
3184 break;
3185 case IFN_UBSAN_OBJECT_SIZE:
3186 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3187 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3188 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3189 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3190 gimple_call_arg (stmt, 2))))
3192 gsi_replace (gsi, gimple_build_nop (), true);
3193 unlink_stmt_vdef (stmt);
3194 release_defs (stmt);
3195 return true;
3197 break;
3198 case IFN_UBSAN_CHECK_ADD:
3199 subcode = PLUS_EXPR;
3200 break;
3201 case IFN_UBSAN_CHECK_SUB:
3202 subcode = MINUS_EXPR;
3203 break;
3204 case IFN_UBSAN_CHECK_MUL:
3205 subcode = MULT_EXPR;
3206 break;
3207 case IFN_ADD_OVERFLOW:
3208 subcode = PLUS_EXPR;
3209 cplx_result = true;
3210 break;
3211 case IFN_SUB_OVERFLOW:
3212 subcode = MINUS_EXPR;
3213 cplx_result = true;
3214 break;
3215 case IFN_MUL_OVERFLOW:
3216 subcode = MULT_EXPR;
3217 cplx_result = true;
3218 break;
3219 default:
3220 break;
3222 if (subcode != ERROR_MARK)
3224 tree arg0 = gimple_call_arg (stmt, 0);
3225 tree arg1 = gimple_call_arg (stmt, 1);
3226 tree type = TREE_TYPE (arg0);
3227 if (cplx_result)
3229 tree lhs = gimple_call_lhs (stmt);
3230 if (lhs == NULL_TREE)
3231 type = NULL_TREE;
3232 else
3233 type = TREE_TYPE (TREE_TYPE (lhs));
3235 if (type == NULL_TREE)
3237 /* x = y + 0; x = y - 0; x = y * 0; */
3238 else if (integer_zerop (arg1))
3239 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3240 /* x = 0 + y; x = 0 * y; */
3241 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3242 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3243 /* x = y - y; */
3244 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3245 result = integer_zero_node;
3246 /* x = y * 1; x = 1 * y; */
3247 else if (subcode == MULT_EXPR && integer_onep (arg1))
3248 result = arg0;
3249 else if (subcode == MULT_EXPR && integer_onep (arg0))
3250 result = arg1;
3251 else if (TREE_CODE (arg0) == INTEGER_CST
3252 && TREE_CODE (arg1) == INTEGER_CST)
3254 if (cplx_result)
3255 result = int_const_binop (subcode, fold_convert (type, arg0),
3256 fold_convert (type, arg1));
3257 else
3258 result = int_const_binop (subcode, arg0, arg1);
3259 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3261 if (cplx_result)
3262 overflow = build_one_cst (type);
3263 else
3264 result = NULL_TREE;
3267 if (result)
3269 if (result == integer_zero_node)
3270 result = build_zero_cst (type);
3271 else if (cplx_result && TREE_TYPE (result) != type)
3273 if (TREE_CODE (result) == INTEGER_CST)
3275 if (arith_overflowed_p (PLUS_EXPR, type, result,
3276 integer_zero_node))
3277 overflow = build_one_cst (type);
3279 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3280 && TYPE_UNSIGNED (type))
3281 || (TYPE_PRECISION (type)
3282 < (TYPE_PRECISION (TREE_TYPE (result))
3283 + (TYPE_UNSIGNED (TREE_TYPE (result))
3284 && !TYPE_UNSIGNED (type)))))
3285 result = NULL_TREE;
3286 if (result)
3287 result = fold_convert (type, result);
3292 if (result)
3294 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3295 result = drop_tree_overflow (result);
3296 if (cplx_result)
3298 if (overflow == NULL_TREE)
3299 overflow = build_zero_cst (TREE_TYPE (result));
3300 tree ctype = build_complex_type (TREE_TYPE (result));
3301 if (TREE_CODE (result) == INTEGER_CST
3302 && TREE_CODE (overflow) == INTEGER_CST)
3303 result = build_complex (ctype, result, overflow);
3304 else
3305 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3306 ctype, result, overflow);
3308 if (!update_call_from_tree (gsi, result))
3309 gimplify_and_update_call_from_tree (gsi, result);
3310 changed = true;
3314 return changed;
3318 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3319 gimple_simplify.
3321 Replaces *GSI with the simplification result in RCODE and OPS
3322 and the associated statements in *SEQ. Does the replacement
3323 according to INPLACE and returns true if the operation succeeded. */
3325 static bool
3326 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3327 code_helper rcode, tree *ops,
3328 gimple_seq *seq, bool inplace)
3330 gimple stmt = gsi_stmt (*gsi);
3332 /* Play safe and do not allow abnormals to be mentioned in
3333 newly created statements. See also maybe_push_res_to_seq. */
3334 if ((TREE_CODE (ops[0]) == SSA_NAME
3335 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
3336 || (ops[1]
3337 && TREE_CODE (ops[1]) == SSA_NAME
3338 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
3339 || (ops[2]
3340 && TREE_CODE (ops[2]) == SSA_NAME
3341 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
3342 return false;
  /* A GIMPLE_COND can only hold a comparison, so map the simplification
     result onto one: either directly (comparison code), by testing an
     SSA name against zero, by forcing the condition to a constant
     true/false, or - when not folding in-place - by pushing the result
     computation to *SEQ and testing its value.  */
3344 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3346 gcc_assert (rcode.is_tree_code ());
3347 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3348 /* GIMPLE_CONDs condition may not throw. */
3349 && (!flag_exceptions
3350 || !cfun->can_throw_non_call_exceptions
3351 || !operation_could_trap_p (rcode,
3352 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3353 false, NULL_TREE)))
3354 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1])
3355 else if (rcode == SSA_NAME)
3356 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3357 build_zero_cst (TREE_TYPE (ops[0])));
3358 else if (rcode == INTEGER_CST)
3360 if (integer_zerop (ops[0]))
3361 gimple_cond_make_false (cond_stmt);
3362 else
3363 gimple_cond_make_true (cond_stmt);
3365 else if (!inplace)
3367 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3368 ops, seq);
3369 if (!res)
3370 return false;
3371 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3372 build_zero_cst (TREE_TYPE (res)));
3374 else
3375 return false;
3376 if (dump_file && (dump_flags & TDF_DETAILS))
3378 fprintf (dump_file, "gimple_simplified to ");
3379 if (!gimple_seq_empty_p (*seq))
3380 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3381 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3382 0, TDF_SLIM);
  /* Emit any intermediate statements the simplification required.  */
3384 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3385 return true;
  /* For an assignment, place the simplified operation directly on the
     RHS.  In-place folding must not grow the statement beyond its
     current number of operands.  */
3387 else if (is_gimple_assign (stmt)
3388 && rcode.is_tree_code ())
3390 if (!inplace
3391 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3393 maybe_build_generic_op (rcode,
3394 TREE_TYPE (gimple_assign_lhs (stmt)),
3395 &ops[0], ops[1], ops[2]);
3396 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3397 if (dump_file && (dump_flags & TDF_DETAILS))
3399 fprintf (dump_file, "gimple_simplified to ");
3400 if (!gimple_seq_empty_p (*seq))
3401 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3402 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3403 0, TDF_SLIM);
3405 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3406 return true;
  /* Otherwise (e.g. for calls with an lhs) materialize the result into
     *SEQ and replace the whole statement, preserving virtual operands.  */
3409 else if (!inplace)
3411 if (gimple_has_lhs (stmt))
3413 tree lhs = gimple_get_lhs (stmt);
3414 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3415 ops, seq, lhs))
3416 return false;
3417 if (dump_file && (dump_flags & TDF_DETAILS))
3419 fprintf (dump_file, "gimple_simplified to ");
3420 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3422 gsi_replace_with_seq_vops (gsi, *seq);
3423 return true;
3425 else
3426 gcc_unreachable ();
3429 return false;
3432 /* Canonicalize MEM_REFs invariant address operand after propagation. */
/* Returns true if *T (or a subexpression reached through it) was
   rewritten; false if nothing changed.  */
3434 static bool
3435 maybe_canonicalize_mem_ref_addr (tree *t)
3437 bool res = false;
  /* Look through an outer ADDR_EXPR and any handled component refs
     to reach the innermost base reference.  */
3439 if (TREE_CODE (*t) == ADDR_EXPR)
3440 t = &TREE_OPERAND (*t, 0);
3442 while (handled_component_p (*t))
3443 t = &TREE_OPERAND (*t, 0);
3445 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3446 of invariant addresses into a SSA name MEM_REF address. */
3447 if (TREE_CODE (*t) == MEM_REF
3448 || TREE_CODE (*t) == TARGET_MEM_REF)
3450 tree addr = TREE_OPERAND (*t, 0);
3451 if (TREE_CODE (addr) == ADDR_EXPR
3452 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3453 || handled_component_p (TREE_OPERAND (addr, 0))))
3455 tree base;
3456 HOST_WIDE_INT coffset;
3457 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3458 &coffset);
  /* A constant base/offset split must be computable here, otherwise
     the invariant address could not have been propagated.  */
3459 if (!base)
3460 gcc_unreachable ();
  /* Fold the component offset into the MEM_REF's offset operand.  */
3462 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3463 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3464 TREE_OPERAND (*t, 1),
3465 size_int (coffset));
3466 res = true;
3468 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3469 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3472 /* Canonicalize back MEM_REFs to plain reference trees if the object
3473 accessed is a decl that has the same access semantics as the MEM_REF. */
3474 if (TREE_CODE (*t) == MEM_REF
3475 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3476 && integer_zerop (TREE_OPERAND (*t, 1))
3477 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3479 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3480 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3481 if (/* Same volatile qualification. */
3482 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3483 /* Same TBAA behavior with -fstrict-aliasing. */
3484 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3485 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3486 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3487 /* Same alignment. */
3488 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3489 /* We have to look out here to not drop a required conversion
3490 from the rhs to the lhs if *t appears on the lhs or vice-versa
3491 if it appears on the rhs. Thus require strict type
3492 compatibility. */
3493 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3495 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3496 res = true;
3500 /* Canonicalize TARGET_MEM_REF in particular with respect to
3501 the indexes becoming constant. */
3502 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3504 tree tem = maybe_fold_tmr (*t);
3505 if (tem)
3507 *t = tem;
3508 res = true;
3512 return res;
3515 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3516 distinguishes both cases. */
/* Returns true if *GSI was changed.  VALUEIZE is handed to
   gimple_simplify for looking up values of SSA operands.  */
3518 static bool
3519 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3521 bool changed = false;
3522 gimple stmt = gsi_stmt (*gsi);
3523 unsigned i;
3525 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3526 after propagation.
3527 ??? This shouldn't be done in generic folding but in the
3528 propagation helpers which also know whether an address was
3529 propagated. */
3530 switch (gimple_code (stmt))
3532 case GIMPLE_ASSIGN:
3533 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3535 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3536 if ((REFERENCE_CLASS_P (*rhs)
3537 || TREE_CODE (*rhs) == ADDR_EXPR)
3538 && maybe_canonicalize_mem_ref_addr (rhs))
3539 changed = true;
3540 tree *lhs = gimple_assign_lhs_ptr (stmt);
3541 if (REFERENCE_CLASS_P (*lhs)
3542 && maybe_canonicalize_mem_ref_addr (lhs))
3543 changed = true;
3545 break;
3546 case GIMPLE_CALL:
  /* Canonicalize reference-class call arguments and the call lhs.  */
3548 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3550 tree *arg = gimple_call_arg_ptr (stmt, i);
3551 if (REFERENCE_CLASS_P (*arg)
3552 && maybe_canonicalize_mem_ref_addr (arg))
3553 changed = true;
3555 tree *lhs = gimple_call_lhs_ptr (stmt);
3556 if (*lhs
3557 && REFERENCE_CLASS_P (*lhs)
3558 && maybe_canonicalize_mem_ref_addr (lhs))
3559 changed = true;
3560 break;
3562 case GIMPLE_ASM:
  /* Canonicalize asm output and input operands.  */
3564 gasm *asm_stmt = as_a <gasm *> (stmt);
3565 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3567 tree link = gimple_asm_output_op (asm_stmt, i);
3568 tree op = TREE_VALUE (link);
3569 if (REFERENCE_CLASS_P (op)
3570 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3571 changed = true;
3573 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3575 tree link = gimple_asm_input_op (asm_stmt, i);
3576 tree op = TREE_VALUE (link);
3577 if ((REFERENCE_CLASS_P (op)
3578 || TREE_CODE (op) == ADDR_EXPR)
3579 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3580 changed = true;
3583 break;
3584 case GIMPLE_DEBUG:
3585 if (gimple_debug_bind_p (stmt))
3587 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3588 if (*val
3589 && (REFERENCE_CLASS_P (*val)
3590 || TREE_CODE (*val) == ADDR_EXPR)
3591 && maybe_canonicalize_mem_ref_addr (val))
3592 changed = true;
3594 break;
3595 default:;
3598 /* Dispatch to pattern-based folding. */
/* In-place folding can only be attempted for assignments and
   conditions here; other statement kinds might have to be replaced
   wholesale, which INPLACE forbids.  */
3599 if (!inplace
3600 || is_gimple_assign (stmt)
3601 || gimple_code (stmt) == GIMPLE_COND)
3603 gimple_seq seq = NULL;
3604 code_helper rcode;
3605 tree ops[3] = {};
3606 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
3608 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3609 changed = true;
3610 else
3611 gimple_seq_discard (seq);
  /* The statement may have been replaced above; re-fetch it.  */
3615 stmt = gsi_stmt (*gsi);
3617 /* Fold the main computation performed by the statement. */
3618 switch (gimple_code (stmt))
3620 case GIMPLE_ASSIGN:
3622 unsigned old_num_ops = gimple_num_ops (stmt);
3623 enum tree_code subcode = gimple_assign_rhs_code (stmt);
3624 tree lhs = gimple_assign_lhs (stmt);
3625 tree new_rhs;
3626 /* First canonicalize operand order. This avoids building new
3627 trees if this is the only thing fold would later do. */
3628 if ((commutative_tree_code (subcode)
3629 || commutative_ternary_tree_code (subcode))
3630 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
3631 gimple_assign_rhs2 (stmt), false))
3633 tree tem = gimple_assign_rhs1 (stmt);
3634 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
3635 gimple_assign_set_rhs2 (stmt, tem);
3636 changed = true;
3638 new_rhs = fold_gimple_assign (gsi);
  /* Convert the folded RHS back to the lhs type if folding changed it.  */
3639 if (new_rhs
3640 && !useless_type_conversion_p (TREE_TYPE (lhs),
3641 TREE_TYPE (new_rhs)))
3642 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
  /* In-place folding may not grow the number of RHS operands.  */
3643 if (new_rhs
3644 && (!inplace
3645 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3647 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3648 changed = true;
3650 break;
3653 case GIMPLE_COND:
3654 changed |= fold_gimple_cond (as_a <gcond *> (stmt));
3655 break;
3657 case GIMPLE_CALL:
3658 changed |= gimple_fold_call (gsi, inplace);
3659 break;
3661 case GIMPLE_ASM:
3662 /* Fold *& in asm operands. */
3664 gasm *asm_stmt = as_a <gasm *> (stmt);
3665 size_t noutputs;
3666 const char **oconstraints;
3667 const char *constraint;
3668 bool allows_mem, allows_reg;
3670 noutputs = gimple_asm_noutputs (asm_stmt);
3671 oconstraints = XALLOCAVEC (const char *, noutputs);
3673 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3675 tree link = gimple_asm_output_op (asm_stmt, i);
3676 tree op = TREE_VALUE (link);
3677 oconstraints[i]
3678 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3679 if (REFERENCE_CLASS_P (op)
3680 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3682 TREE_VALUE (link) = op;
3683 changed = true;
3686 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3688 tree link = gimple_asm_input_op (asm_stmt, i);
3689 tree op = TREE_VALUE (link);
3690 constraint
3691 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
  /* Whether an input may be folded to a memory reference depends on
     its constraint (memory allowed, register not allowed).  */
3692 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3693 oconstraints, &allows_mem, &allows_reg);
3694 if (REFERENCE_CLASS_P (op)
3695 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3696 != NULL_TREE)
3698 TREE_VALUE (link) = op;
3699 changed = true;
3703 break;
3705 case GIMPLE_DEBUG:
3706 if (gimple_debug_bind_p (stmt))
3708 tree val = gimple_debug_bind_get_value (stmt);
3709 if (val
3710 && REFERENCE_CLASS_P (val))
3712 tree tem = maybe_fold_reference (val, false);
3713 if (tem)
3715 gimple_debug_bind_set_value (stmt, tem);
3716 changed = true;
3719 else if (val
3720 && TREE_CODE (val) == ADDR_EXPR)
3722 tree ref = TREE_OPERAND (val, 0);
3723 tree tem = maybe_fold_reference (ref, false);
3724 if (tem)
3726 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3727 gimple_debug_bind_set_value (stmt, tem);
3728 changed = true;
3732 break;
3734 default:;
  /* Re-fetch again in case folding above replaced the statement.  */
3737 stmt = gsi_stmt (*gsi);
3739 /* Fold *& on the lhs. */
3740 if (gimple_has_lhs (stmt))
3742 tree lhs = gimple_get_lhs (stmt);
3743 if (lhs && REFERENCE_CLASS_P (lhs))
3745 tree new_lhs = maybe_fold_reference (lhs, true);
3746 if (new_lhs)
3748 gimple_set_lhs (stmt, new_lhs);
3749 changed = true;
3754 return changed;
3757 /* Valueziation callback that ends up not following SSA edges. */
3759 tree
3760 no_follow_ssa_edges (tree)
3762 return NULL_TREE;
3765 /* Valueization callback that ends up following single-use SSA edges only. */
3767 tree
3768 follow_single_use_edges (tree val)
3770 if (TREE_CODE (val) == SSA_NAME
3771 && !has_single_use (val))
3772 return NULL_TREE;
3773 return val;
3776 /* Fold the statement pointed to by GSI. In some cases, this function may
3777 replace the whole statement with a new one. Returns true iff folding
3778 makes any changes.
3779 The statement pointed to by GSI should be in valid gimple form but may
3780 be in unfolded state as resulting from for example constant propagation
3781 which can produce *&x = 0. */
3783 bool
3784 fold_stmt (gimple_stmt_iterator *gsi)
3786 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3789 bool
3790 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3792 return fold_stmt_1 (gsi, false, valueize);
3795 /* Perform the minimal folding on statement *GSI. Only operations like
3796 *&x created by constant propagation are handled. The statement cannot
3797 be replaced with a new one. Return true if the statement was
3798 changed, false otherwise.
3799 The statement *GSI should be in valid gimple form but may
3800 be in unfolded state as resulting from for example constant propagation
3801 which can produce *&x = 0. */
3803 bool
3804 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3806 gimple stmt = gsi_stmt (*gsi);
3807 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3808 gcc_assert (gsi_stmt (*gsi) == stmt);
3809 return changed;
3812 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3813 if EXPR is null or we don't know how.
3814 If non-null, the result always has boolean type. */
3816 static tree
3817 canonicalize_bool (tree expr, bool invert)
3819 if (!expr)
3820 return NULL_TREE;
3821 else if (invert)
3823 if (integer_nonzerop (expr))
3824 return boolean_false_node;
3825 else if (integer_zerop (expr))
3826 return boolean_true_node;
3827 else if (TREE_CODE (expr) == SSA_NAME)
3828 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3829 build_int_cst (TREE_TYPE (expr), 0));
3830 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3831 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3832 boolean_type_node,
3833 TREE_OPERAND (expr, 0),
3834 TREE_OPERAND (expr, 1));
3835 else
3836 return NULL_TREE;
3838 else
3840 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3841 return expr;
3842 if (integer_nonzerop (expr))
3843 return boolean_true_node;
3844 else if (integer_zerop (expr))
3845 return boolean_false_node;
3846 else if (TREE_CODE (expr) == SSA_NAME)
3847 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3848 build_int_cst (TREE_TYPE (expr), 0));
3849 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3850 return fold_build2 (TREE_CODE (expr),
3851 boolean_type_node,
3852 TREE_OPERAND (expr, 0),
3853 TREE_OPERAND (expr, 1));
3854 else
3855 return NULL_TREE;
3859 /* Check to see if a boolean expression EXPR is logically equivalent to the
3860 comparison (OP1 CODE OP2). Check for various identities involving
3861 SSA_NAMEs. */
3863 static bool
3864 same_bool_comparison_p (const_tree expr, enum tree_code code,
3865 const_tree op1, const_tree op2)
3867 gimple s;
3869 /* The obvious case. */
3870 if (TREE_CODE (expr) == code
3871 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3872 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3873 return true;
3875 /* Check for comparing (name, name != 0) and the case where expr
3876 is an SSA_NAME with a definition matching the comparison. */
3877 if (TREE_CODE (expr) == SSA_NAME
3878 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
  /* A boolean EXPR equals (EXPR != 0) and (EXPR == nonzero).  */
3880 if (operand_equal_p (expr, op1, 0))
3881 return ((code == NE_EXPR && integer_zerop (op2))
3882 || (code == EQ_EXPR && integer_nonzerop (op2)));
  /* Otherwise compare EXPR's defining statement to the comparison.  */
3883 s = SSA_NAME_DEF_STMT (expr);
3884 if (is_gimple_assign (s)
3885 && gimple_assign_rhs_code (s) == code
3886 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3887 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3888 return true;
3891 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3892 of name is a comparison, recurse. */
3893 if (TREE_CODE (op1) == SSA_NAME
3894 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3896 s = SSA_NAME_DEF_STMT (op1);
3897 if (is_gimple_assign (s)
3898 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3900 enum tree_code c = gimple_assign_rhs_code (s);
  /* (name != 0) / (name == nonzero) keeps the comparison's sense;
     (name == 0) / (name != nonzero) inverts it.  */
3901 if ((c == NE_EXPR && integer_zerop (op2))
3902 || (c == EQ_EXPR && integer_nonzerop (op2)))
3903 return same_bool_comparison_p (expr, c,
3904 gimple_assign_rhs1 (s),
3905 gimple_assign_rhs2 (s));
3906 if ((c == EQ_EXPR && integer_zerop (op2))
3907 || (c == NE_EXPR && integer_nonzerop (op2)))
3908 return same_bool_comparison_p (expr,
3909 invert_tree_comparison (c, false),
3910 gimple_assign_rhs1 (s),
3911 gimple_assign_rhs2 (s));
3914 return false;
3917 /* Check to see if two boolean expressions OP1 and OP2 are logically
3918 equivalent. */
3920 static bool
3921 same_bool_result_p (const_tree op1, const_tree op2)
3923 /* Simple cases first. */
3924 if (operand_equal_p (op1, op2, 0))
3925 return true;
3927 /* Check the cases where at least one of the operands is a comparison.
3928 These are a bit smarter than operand_equal_p in that they apply some
3929 identifies on SSA_NAMEs. */
3930 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
3931 && same_bool_comparison_p (op1, TREE_CODE (op2),
3932 TREE_OPERAND (op2, 0),
3933 TREE_OPERAND (op2, 1)))
3934 return true;
3935 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
3936 && same_bool_comparison_p (op2, TREE_CODE (op1),
3937 TREE_OPERAND (op1, 0),
3938 TREE_OPERAND (op1, 1)))
3939 return true;
3941 /* Default case. */
3942 return false;
3945 /* Forward declarations for some mutually recursive functions. */
3947 static tree
3948 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3949 enum tree_code code2, tree op2a, tree op2b);
3950 static tree
3951 and_var_with_comparison (tree var, bool invert,
3952 enum tree_code code2, tree op2a, tree op2b);
3953 static tree
3954 and_var_with_comparison_1 (gimple stmt,
3955 enum tree_code code2, tree op2a, tree op2b);
3956 static tree
3957 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3958 enum tree_code code2, tree op2a, tree op2b);
3959 static tree
3960 or_var_with_comparison (tree var, bool invert,
3961 enum tree_code code2, tree op2a, tree op2b);
3962 static tree
3963 or_var_with_comparison_1 (gimple stmt,
3964 enum tree_code code2, tree op2a, tree op2b);
3966 /* Helper function for and_comparisons_1: try to simplify the AND of the
3967 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3968 If INVERT is true, invert the value of the VAR before doing the AND.
3969 Return NULL_EXPR if we can't simplify this to a single expression. */
3971 static tree
3972 and_var_with_comparison (tree var, bool invert,
3973 enum tree_code code2, tree op2a, tree op2b)
3975 tree t;
3976 gimple stmt = SSA_NAME_DEF_STMT (var);
3978 /* We can only deal with variables whose definitions are assignments. */
3979 if (!is_gimple_assign (stmt))
3980 return NULL_TREE;
3982 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3983 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3984 Then we only have to consider the simpler non-inverted cases. */
3985 if (invert)
3986 t = or_var_with_comparison_1 (stmt,
3987 invert_tree_comparison (code2, false),
3988 op2a, op2b);
3989 else
3990 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3991 return canonicalize_bool (t, invert);
3994 /* Try to simplify the AND of the ssa variable defined by the assignment
3995 STMT with the comparison specified by (OP2A CODE2 OP2B).
3996 Return NULL_EXPR if we can't simplify this to a single expression. */
3998 static tree
3999 and_var_with_comparison_1 (gimple stmt,
4000 enum tree_code code2, tree op2a, tree op2b)
4002 tree var = gimple_assign_lhs (stmt);
  /* TRUE_TEST_VAR / FALSE_TEST_VAR record, when set, that the comparison
     (op2a code2 op2b) is really a truth/falsehood test of the SSA name
     op2a itself.  */
4003 tree true_test_var = NULL_TREE;
4004 tree false_test_var = NULL_TREE;
4005 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4007 /* Check for identities like (var AND (var == 0)) => false. */
4008 if (TREE_CODE (op2a) == SSA_NAME
4009 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4011 if ((code2 == NE_EXPR && integer_zerop (op2b))
4012 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4014 true_test_var = op2a;
  /* (var AND (var != 0)) => var.  */
4015 if (var == true_test_var)
4016 return var;
4018 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4019 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4021 false_test_var = op2a;
  /* (var AND (var == 0)) => false.  */
4022 if (var == false_test_var)
4023 return boolean_false_node;
4027 /* If the definition is a comparison, recurse on it. */
4028 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4030 tree t = and_comparisons_1 (innercode,
4031 gimple_assign_rhs1 (stmt),
4032 gimple_assign_rhs2 (stmt),
4033 code2,
4034 op2a,
4035 op2b);
4036 if (t)
4037 return t;
4040 /* If the definition is an AND or OR expression, we may be able to
4041 simplify by reassociating. */
4042 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4043 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4045 tree inner1 = gimple_assign_rhs1 (stmt);
4046 tree inner2 = gimple_assign_rhs2 (stmt);
4047 gimple s;
4048 tree t;
  /* PARTIAL holds the simplification of the first inner operand ANDed
     with the comparison, if one was found but was not conclusive.  */
4049 tree partial = NULL_TREE;
4050 bool is_and = (innercode == BIT_AND_EXPR);
4052 /* Check for boolean identities that don't require recursive examination
4053 of inner1/inner2:
4054 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4055 inner1 AND (inner1 OR inner2) => inner1
4056 !inner1 AND (inner1 AND inner2) => false
4057 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4058 Likewise for similar cases involving inner2. */
4059 if (inner1 == true_test_var)
4060 return (is_and ? var : inner1);
4061 else if (inner2 == true_test_var)
4062 return (is_and ? var : inner2);
4063 else if (inner1 == false_test_var)
4064 return (is_and
4065 ? boolean_false_node
4066 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4067 else if (inner2 == false_test_var)
4068 return (is_and
4069 ? boolean_false_node
4070 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4072 /* Next, redistribute/reassociate the AND across the inner tests.
4073 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4074 if (TREE_CODE (inner1) == SSA_NAME
4075 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4076 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4077 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4078 gimple_assign_rhs1 (s),
4079 gimple_assign_rhs2 (s),
4080 code2, op2a, op2b)))
4082 /* Handle the AND case, where we are reassociating:
4083 (inner1 AND inner2) AND (op2a code2 op2b)
4084 => (t AND inner2)
4085 If the partial result t is a constant, we win. Otherwise
4086 continue on to try reassociating with the other inner test. */
4087 if (is_and)
4089 if (integer_onep (t))
4090 return inner2;
4091 else if (integer_zerop (t))
4092 return boolean_false_node;
4095 /* Handle the OR case, where we are redistributing:
4096 (inner1 OR inner2) AND (op2a code2 op2b)
4097 => (t OR (inner2 AND (op2a code2 op2b))) */
4098 else if (integer_onep (t))
4099 return boolean_true_node;
4101 /* Save partial result for later. */
4102 partial = t;
4105 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4106 if (TREE_CODE (inner2) == SSA_NAME
4107 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4108 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4109 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4110 gimple_assign_rhs1 (s),
4111 gimple_assign_rhs2 (s),
4112 code2, op2a, op2b)))
4114 /* Handle the AND case, where we are reassociating:
4115 (inner1 AND inner2) AND (op2a code2 op2b)
4116 => (inner1 AND t) */
4117 if (is_and)
4119 if (integer_onep (t))
4120 return inner1;
4121 else if (integer_zerop (t))
4122 return boolean_false_node;
4123 /* If both are the same, we can apply the identity
4124 (x AND x) == x. */
4125 else if (partial && same_bool_result_p (t, partial))
4126 return t;
4129 /* Handle the OR case. where we are redistributing:
4130 (inner1 OR inner2) AND (op2a code2 op2b)
4131 => (t OR (inner1 AND (op2a code2 op2b)))
4132 => (t OR partial) */
4133 else
4135 if (integer_onep (t))
4136 return boolean_true_node;
4137 else if (partial)
4139 /* We already got a simplification for the other
4140 operand to the redistributed OR expression. The
4141 interesting case is when at least one is false.
4142 Or, if both are the same, we can apply the identity
4143 (x OR x) == x. */
4144 if (integer_zerop (partial))
4145 return t;
4146 else if (integer_zerop (t))
4147 return partial;
4148 else if (same_bool_result_p (t, partial))
4149 return t;
4154 return NULL_TREE;
4157 /* Try to simplify the AND of two comparisons defined by
4158 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4159 If this can be done without constructing an intermediate value,
4160 return the resulting tree; otherwise NULL_TREE is returned.
4161 This function is deliberately asymmetric as it recurses on SSA_DEFs
4162 in the first comparison but not the second. */
4164 static tree
4165 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4166 enum tree_code code2, tree op2a, tree op2b)
4168 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4170 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4171 if (operand_equal_p (op1a, op2a, 0)
4172 && operand_equal_p (op1b, op2b, 0))
4174 /* Result will be either NULL_TREE, or a combined comparison. */
4175 tree t = combine_comparisons (UNKNOWN_LOCATION,
4176 TRUTH_ANDIF_EXPR, code1, code2,
4177 truth_type, op1a, op1b);
4178 if (t)
4179 return t;
4182 /* Likewise the swapped case of the above. */
4183 if (operand_equal_p (op1a, op2b, 0)
4184 && operand_equal_p (op1b, op2a, 0))
4186 /* Result will be either NULL_TREE, or a combined comparison. */
4187 tree t = combine_comparisons (UNKNOWN_LOCATION,
4188 TRUTH_ANDIF_EXPR, code1,
4189 swap_tree_comparison (code2),
4190 truth_type, op1a, op1b);
4191 if (t)
4192 return t;
4195 /* If both comparisons are of the same value against constants, we might
4196 be able to merge them. */
4197 if (operand_equal_p (op1a, op2a, 0)
4198 && TREE_CODE (op1b) == INTEGER_CST
4199 && TREE_CODE (op2b) == INTEGER_CST)
4201 int cmp = tree_int_cst_compare (op1b, op2b);
4203 /* If we have (op1a == op1b), we should either be able to
4204 return that or FALSE, depending on whether the constant op1b
4205 also satisfies the other comparison against op2b. */
4206 if (code1 == EQ_EXPR)
4208 bool done = true;
4209 bool val;
4210 switch (code2)
4212 case EQ_EXPR: val = (cmp == 0); break;
4213 case NE_EXPR: val = (cmp != 0); break;
4214 case LT_EXPR: val = (cmp < 0); break;
4215 case GT_EXPR: val = (cmp > 0); break;
4216 case LE_EXPR: val = (cmp <= 0); break;
4217 case GE_EXPR: val = (cmp >= 0); break;
4218 default: done = false;
4220 if (done)
4222 if (val)
4223 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4224 else
4225 return boolean_false_node;
4228 /* Likewise if the second comparison is an == comparison. */
4229 else if (code2 == EQ_EXPR)
4231 bool done = true;
4232 bool val;
4233 switch (code1)
4235 case EQ_EXPR: val = (cmp == 0); break;
4236 case NE_EXPR: val = (cmp != 0); break;
4237 case LT_EXPR: val = (cmp > 0); break;
4238 case GT_EXPR: val = (cmp < 0); break;
4239 case LE_EXPR: val = (cmp >= 0); break;
4240 case GE_EXPR: val = (cmp <= 0); break;
4241 default: done = false;
4243 if (done)
4245 if (val)
4246 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4247 else
4248 return boolean_false_node;
4252 /* Same business with inequality tests. */
4253 else if (code1 == NE_EXPR)
4255 bool val;
4256 switch (code2)
4258 case EQ_EXPR: val = (cmp != 0); break;
4259 case NE_EXPR: val = (cmp == 0); break;
4260 case LT_EXPR: val = (cmp >= 0); break;
4261 case GT_EXPR: val = (cmp <= 0); break;
4262 case LE_EXPR: val = (cmp > 0); break;
4263 case GE_EXPR: val = (cmp < 0); break;
4264 default:
4265 val = false;
4267 if (val)
4268 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4270 else if (code2 == NE_EXPR)
4272 bool val;
4273 switch (code1)
4275 case EQ_EXPR: val = (cmp == 0); break;
4276 case NE_EXPR: val = (cmp != 0); break;
4277 case LT_EXPR: val = (cmp <= 0); break;
4278 case GT_EXPR: val = (cmp >= 0); break;
4279 case LE_EXPR: val = (cmp < 0); break;
4280 case GE_EXPR: val = (cmp > 0); break;
4281 default:
4282 val = false;
4284 if (val)
4285 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4288 /* Chose the more restrictive of two < or <= comparisons. */
4289 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4290 && (code2 == LT_EXPR || code2 == LE_EXPR))
4292 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4293 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4294 else
4295 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4298 /* Likewise chose the more restrictive of two > or >= comparisons. */
4299 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4300 && (code2 == GT_EXPR || code2 == GE_EXPR))
4302 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4303 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4304 else
4305 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4308 /* Check for singleton ranges. */
4309 else if (cmp == 0
4310 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4311 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4312 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4314 /* Check for disjoint ranges. */
4315 else if (cmp <= 0
4316 && (code1 == LT_EXPR || code1 == LE_EXPR)
4317 && (code2 == GT_EXPR || code2 == GE_EXPR))
4318 return boolean_false_node;
4319 else if (cmp >= 0
4320 && (code1 == GT_EXPR || code1 == GE_EXPR)
4321 && (code2 == LT_EXPR || code2 == LE_EXPR))
4322 return boolean_false_node;
4325 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4326 NAME's definition is a truth value. See if there are any simplifications
4327 that can be done against the NAME's definition. */
4328 if (TREE_CODE (op1a) == SSA_NAME
4329 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4330 && (integer_zerop (op1b) || integer_onep (op1b)))
4332 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4333 || (code1 == NE_EXPR && integer_onep (op1b)));
4334 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4335 switch (gimple_code (stmt))
4337 case GIMPLE_ASSIGN:
4338 /* Try to simplify by copy-propagating the definition. */
4339 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4341 case GIMPLE_PHI:
4342 /* If every argument to the PHI produces the same result when
4343 ANDed with the second comparison, we win.
4344 Do not do this unless the type is bool since we need a bool
4345 result here anyway. */
4346 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4348 tree result = NULL_TREE;
4349 unsigned i;
4350 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4352 tree arg = gimple_phi_arg_def (stmt, i);
4354 /* If this PHI has itself as an argument, ignore it.
4355 If all the other args produce the same result,
4356 we're still OK. */
4357 if (arg == gimple_phi_result (stmt))
4358 continue;
4359 else if (TREE_CODE (arg) == INTEGER_CST)
4361 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4363 if (!result)
4364 result = boolean_false_node;
4365 else if (!integer_zerop (result))
4366 return NULL_TREE;
4368 else if (!result)
4369 result = fold_build2 (code2, boolean_type_node,
4370 op2a, op2b);
4371 else if (!same_bool_comparison_p (result,
4372 code2, op2a, op2b))
4373 return NULL_TREE;
4375 else if (TREE_CODE (arg) == SSA_NAME
4376 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4378 tree temp;
4379 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4380 /* In simple cases we can look through PHI nodes,
4381 but we have to be careful with loops.
4382 See PR49073. */
4383 if (! dom_info_available_p (CDI_DOMINATORS)
4384 || gimple_bb (def_stmt) == gimple_bb (stmt)
4385 || dominated_by_p (CDI_DOMINATORS,
4386 gimple_bb (def_stmt),
4387 gimple_bb (stmt)))
4388 return NULL_TREE;
4389 temp = and_var_with_comparison (arg, invert, code2,
4390 op2a, op2b);
4391 if (!temp)
4392 return NULL_TREE;
4393 else if (!result)
4394 result = temp;
4395 else if (!same_bool_result_p (result, temp))
4396 return NULL_TREE;
4398 else
4399 return NULL_TREE;
4401 return result;
4404 default:
4405 break;
4408 return NULL_TREE;
4411 /* Try to simplify the AND of two comparisons, specified by
4412 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4413 If this can be simplified to a single expression (without requiring
4414 introducing more SSA variables to hold intermediate values),
4415 return the resulting tree. Otherwise return NULL_TREE.
4416 If the result expression is non-null, it has boolean type. */
4418 tree
4419 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4420 enum tree_code code2, tree op2a, tree op2b)
4422 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4423 if (t)
4424 return t;
4425 else
4426 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
/* Helper function for or_comparisons_1: try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.
     invert_tree_comparison with honor_nans==false is safe here because
     we only negate the second comparison, not VAR itself.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  /* Undo the inversion (if any) on the simplified result.  */
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) and (op2a == 1) both test op2a being true.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a == 0) and (op2a != 1) both test op2a being false.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  CMP below is the three-way comparison of the
     two constant bounds op1b and op2b.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise chose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges: (x < C) OR (x > C) == (x != C).  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME being false.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4875 /* Try to simplify the OR of two comparisons, specified by
4876 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4877 If this can be simplified to a single expression (without requiring
4878 introducing more SSA variables to hold intermediate values),
4879 return the resulting tree. Otherwise return NULL_TREE.
4880 If the result expression is non-null, it has boolean type. */
4882 tree
4883 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4884 enum tree_code code2, tree op2a, tree op2b)
4886 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4887 if (t)
4888 return t;
4889 else
4890 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize)
      && rcode.is_tree_code ()
      && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
	  || ((tree_code) rcode) == ADDR_EXPR)
      && is_gimple_val (ops[0]))
    {
      tree res = ops[0];
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Match-and-simplified ");
	  print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, res, 0);
	  fprintf (dump_file, "\n");
	}
      return res;
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  HOST_WIDE_INT offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
		  /* Fold a fully-initialized vector CONSTRUCTOR to a
		     VECTOR_CST when every valueized element is constant.  */
                  unsigned i;
                  tree val, *vec;

                  vec = XALLOCAVEC (tree,
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec[i] = val;
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), vec);
                }
              if (subcode == OBJ_TYPE_REF)
                {
                  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
                  /* If callee is constant, we can fold away the wrapper.  */
                  if (is_gimple_min_invariant (val))
                    return val;
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
		      /* Replace the SSA base with its constant address so
			 the aggregate-reference folder below can see it.  */
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            return NULL_TREE;

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

              /* Translate &x + CST into an invariant form suitable for
                 further propagation.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree off = fold_convert (ptr_type_node, op1);
                  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
                }

              return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
            }

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

              /* Fold embedded expressions in ternary codes.  */
              if ((subcode == COND_EXPR
                   || subcode == VEC_COND_EXPR)
                  && COMPARISON_CLASS_P (op0))
                {
                  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
                  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
                  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
                                              TREE_TYPE (op0), op00, op01);
                  if (tem)
                    op0 = tem;
                }

              return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
        tree fn;
        gcall *call_stmt = as_a <gcall *> (stmt);

        if (gimple_call_internal_p (stmt))
          {
	    /* Fold the UBSAN_CHECK_* internal functions through the
	       corresponding arithmetic tree code when no overflow occurs.  */
            enum tree_code subcode = ERROR_MARK;
            switch (gimple_call_internal_fn (stmt))
              {
              case IFN_UBSAN_CHECK_ADD:
                subcode = PLUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_SUB:
                subcode = MINUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_MUL:
                subcode = MULT_EXPR;
                break;
              default:
                return NULL_TREE;
              }
            tree arg0 = gimple_call_arg (stmt, 0);
            tree arg1 = gimple_call_arg (stmt, 1);
            tree op0 = (*valueize) (arg0);
            tree op1 = (*valueize) (arg1);

            if (TREE_CODE (op0) != INTEGER_CST
                || TREE_CODE (op1) != INTEGER_CST)
              {
                switch (subcode)
                  {
                  case MULT_EXPR:
                    /* x * 0 = 0 * x = 0 without overflow.  */
                    if (integer_zerop (op0) || integer_zerop (op1))
                      return build_zero_cst (TREE_TYPE (arg0));
                    break;
                  case MINUS_EXPR:
                    /* y - y = 0 without overflow.  */
                    if (operand_equal_p (op0, op1, 0))
                      return build_zero_cst (TREE_TYPE (arg0));
                    break;
                  default:
                    break;
                  }
              }
            tree res
              = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
            if (res
                && TREE_CODE (res) == INTEGER_CST
                && !TREE_OVERFLOW (res))
              return res;
            return NULL_TREE;
          }

        fn = (*valueize) (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
            && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = (*valueize) (gimple_call_arg (stmt, i));
            retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
            if (retval)
              {
                /* fold_call_expr wraps the result inside a NOP_EXPR.  */
                STRIP_NOPS (retval);
                retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
              }
            return retval;
          }
        return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
5184 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5185 Returns NULL_TREE if folding to a constant is not possible, otherwise
5186 returns a constant according to is_gimple_min_invariant. */
5188 tree
5189 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
5191 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5192 if (res && is_gimple_min_invariant (res))
5193 return res;
5194 return NULL_TREE;
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the consructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant byte offset into BIT_OFFSET and
	 strip the address operation to get at the underlying decl.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only constant-size, non-variable extents can be folded.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  if (index_type)
    /* Wrap the index to the precision and signedness of the domain.  */
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* Start one below the low bound so the first implicit (NULL-index)
     element increments to the low bound itself.  */
  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitely set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitely mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
5369 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5370 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
     FROM_DECL is only passed through to the recursive fold_ctor_reference
     call.  Returns NULL_TREE when the access cannot be folded, and a zero
     constant for memory the constructor does not explicitly mention.  */
5372 static tree
5373 fold_nonarray_ctor_reference (tree type, tree ctor,
5374 unsigned HOST_WIDE_INT offset,
5375 unsigned HOST_WIDE_INT size,
5376 tree from_decl)
5378 unsigned HOST_WIDE_INT cnt;
5379 tree cfield, cval;
5381 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5382 cval)
5384 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5385 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5386 tree field_size = DECL_SIZE (cfield);
5387 offset_int bitoffset;
5388 offset_int bitoffset_end, access_end;
5390 /* Variable sized objects in static constructors make no sense,
5391 but field_size can be NULL for flexible array members.  */
5392 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5393 && TREE_CODE (byte_offset) == INTEGER_CST
5394 && (field_size != NULL_TREE
5395 ? TREE_CODE (field_size) == INTEGER_CST
5396 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5398 /* Compute bit offset of the field.  */
5399 bitoffset = (wi::to_offset (field_offset)
5400 + wi::lshift (wi::to_offset (byte_offset),
5401 LOG2_BITS_PER_UNIT));
5402 /* Compute bit offset where the field ends.  */
5403 if (field_size != NULL_TREE)
5404 bitoffset_end = bitoffset + wi::to_offset (field_size);
5405 else
     /* Flexible array member; the end bound is unused in that case
	(the overlap test below checks field_size == NULL_TREE first).  */
5406 bitoffset_end = 0;
5408 access_end = offset_int (offset) + size;
5410 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5411 [BITOFFSET, BITOFFSET_END)?  */
5412 if (wi::cmps (access_end, bitoffset) > 0
5413 && (field_size == NULL_TREE
5414 || wi::lts_p (offset, bitoffset_end)))
5416 offset_int inner_offset = offset_int (offset) - bitoffset;
5417 /* We do have overlap.  Now see if field is large enough to
5418 cover the access.  Give up for accesses spanning multiple
5419 fields.  */
5420 if (wi::cmps (access_end, bitoffset_end) > 0)
5421 return NULL_TREE;
5422 if (wi::lts_p (offset, bitoffset))
5423 return NULL_TREE;
5424 return fold_ctor_reference (type, cval,
5425 inner_offset.to_uhwi (), size,
5426 from_decl);
5429 /* When memory is not explicitly mentioned in constructor, it is 0.  */
5430 return build_zero_cst (type);
5433 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5434 to the memory at bit OFFSET.  FROM_DECL is the declaration the constructor
     comes from and is passed to canonicalize_constructor_val and the
     array/nonarray helpers.  Returns NULL_TREE when no folding is
     possible.  */
5436 tree
5437 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5438 unsigned HOST_WIDE_INT size, tree from_decl)
5440 tree ret;
5442 /* We found the field with exact match.  */
5443 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5444 && !offset)
5445 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5447 /* We are at the end of walk, see if we can view convert the
5448 result.  */
5449 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5450 /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
5451 && !compare_tree_int (TYPE_SIZE (type), size)
5452 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5454 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5455 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5456 if (ret)
5457 STRIP_NOPS (ret);
5458 return ret;
5460 /* For constants and byte-aligned/sized reads try to go through
5461 native_encode/interpret.  */
5462 if (CONSTANT_CLASS_P (ctor)
5463 && BITS_PER_UNIT == 8
5464 && offset % BITS_PER_UNIT == 0
5465 && size % BITS_PER_UNIT == 0
5466 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5468 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5469 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5470 offset / BITS_PER_UNIT) > 0)
5471 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
     /* Recurse into CONSTRUCTORs, dispatching on the aggregate kind.  */
5473 if (TREE_CODE (ctor) == CONSTRUCTOR)
5476 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5477 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5478 return fold_array_ctor_reference (type, ctor, offset, size,
5479 from_decl);
5480 else
5481 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5482 from_decl);
5485 return NULL_TREE;
5488 /* Return the tree representing the element referenced by T if T is an
5489 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
5490 names using VALUEIZE.  Return NULL_TREE otherwise.  */
5492 tree
5493 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5495 tree ctor, idx, base;
5496 HOST_WIDE_INT offset, size, max_size;
5497 tree tem;
5499 if (TREE_THIS_VOLATILE (t))
5500 return NULL_TREE;
5502 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
5503 return get_symbol_constant_value (t);
5505 tem = fold_read_from_constant_string (t);
5506 if (tem)
5507 return tem;
5509 switch (TREE_CODE (t))
5511 case ARRAY_REF:
5512 case ARRAY_RANGE_REF:
5513 /* Constant indexes are handled well by get_base_constructor.
5514 Only special case variable offsets.
5515 FIXME: This code can't handle nested references with variable indexes
5516 (they will be handled only by iteration of ccp).  Perhaps we can bring
5517 get_ref_base_and_extent here and make it use a valueize callback.  */
5518 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5519 && valueize
5520 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5521 && TREE_CODE (idx) == INTEGER_CST)
5523 tree low_bound, unit_size;
5525 /* If the resulting bit-offset is constant, track it.  */
5526 if ((low_bound = array_ref_low_bound (t),
5527 TREE_CODE (low_bound) == INTEGER_CST)
5528 && (unit_size = array_ref_element_size (t),
5529 tree_fits_uhwi_p (unit_size)))
5531 offset_int woffset
5532 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5533 TYPE_PRECISION (TREE_TYPE (idx)));
5535 if (wi::fits_shwi_p (woffset))
5537 offset = woffset.to_shwi ();
5538 /* TODO: This code seems wrong, multiply then check
5539 to see if it fits.  */
5540 offset *= tree_to_uhwi (unit_size);
5541 offset *= BITS_PER_UNIT;
5543 base = TREE_OPERAND (t, 0);
5544 ctor = get_base_constructor (base, &offset, valueize);
5545 /* Empty constructor.  Always fold to 0.  */
5546 if (ctor == error_mark_node)
5547 return build_zero_cst (TREE_TYPE (t));
5548 /* Out of bound array access.  Value is undefined,
5549 but don't fold.  */
5550 if (offset < 0)
5551 return NULL_TREE;
5552 /* We can not determine ctor.  */
5553 if (!ctor)
5554 return NULL_TREE;
5555 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5556 tree_to_uhwi (unit_size)
5557 * BITS_PER_UNIT,
5558 base);
5562 /* Fallthru.  */
5564 case COMPONENT_REF:
5565 case BIT_FIELD_REF:
5566 case TARGET_MEM_REF:
5567 case MEM_REF:
5568 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
5569 ctor = get_base_constructor (base, &offset, valueize);
5571 /* Empty constructor.  Always fold to 0.  */
5572 if (ctor == error_mark_node)
5573 return build_zero_cst (TREE_TYPE (t));
5574 /* We do not know precise address.  */
5575 if (max_size == -1 || max_size != size)
5576 return NULL_TREE;
5577 /* We can not determine ctor.  */
5578 if (!ctor)
5579 return NULL_TREE;
5581 /* Out of bound array access.  Value is undefined, but don't fold.  */
5582 if (offset < 0)
5583 return NULL_TREE;
5585 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5586 base);
5588 case REALPART_EXPR:
5589 case IMAGPART_EXPR:
     /* Fold the complex operand and extract the requested part.  */
5591 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5592 if (c && TREE_CODE (c) == COMPLEX_CST)
5593 return fold_build1_loc (EXPR_LOCATION (t),
5594 TREE_CODE (t), TREE_TYPE (t), c);
5595 break;
5598 default:
5599 break;
5602 return NULL_TREE;
5605 tree
5606 fold_const_aggregate_ref (tree t)
5608 return fold_const_aggregate_ref_1 (t, NULL);
5611 /* Lookup virtual method with index TOKEN in a virtual table V
5612 at OFFSET.
5613 Set CAN_REFER if non-NULL to false if method
5614 is not referable or if the virtual table is ill-formed (such as rewritten
5615 by non-C++ produced symbol).  Otherwise just return NULL in that case.  */
5617 tree
5618 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5619 tree v,
5620 unsigned HOST_WIDE_INT offset,
5621 bool *can_refer)
5623 tree vtable = v, init, fn;
5624 unsigned HOST_WIDE_INT size;
5625 unsigned HOST_WIDE_INT elt_size, access_index;
5626 tree domain_type;
5628 if (can_refer)
5629 *can_refer = true;
5631 /* First of all double check we have virtual table.  */
5632 if (TREE_CODE (v) != VAR_DECL
5633 || !DECL_VIRTUAL_P (v))
5635 gcc_assert (in_lto_p);
5636 /* Pass down that we lost track of the target.  */
5637 if (can_refer)
5638 *can_refer = false;
5639 return NULL_TREE;
5642 init = ctor_for_folding (v);
5644 /* The virtual tables should always be born with constructors
5645 and we always should assume that they are available for
5646 folding.  At the moment we do not stream them in all cases,
5647 but it should never happen that ctor seem unreachable.  */
5648 gcc_assert (init);
5649 if (init == error_mark_node)
5651 gcc_assert (in_lto_p);
5652 /* Pass down that we lost track of the target.  */
5653 if (can_refer)
5654 *can_refer = false;
5655 return NULL_TREE;
5657 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5658 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
     /* Convert OFFSET to bits and add the slot selected by TOKEN.  */
5659 offset *= BITS_PER_UNIT;
5660 offset += token * size;
5662 /* Lookup the value in the constructor that is assumed to be array.
5663 This is equivalent to
5664 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5665 offset, size, NULL);
5666 but in a constant time.  We expect that frontend produced a simple
5667 array without indexed initializers.  */
5669 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5670 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5671 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5672 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5674 access_index = offset / BITS_PER_UNIT / elt_size;
5675 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5677 /* This code makes an assumption that there are no
5678 indexed fields produced by C++ FE, so we can directly index the array.  */
5679 if (access_index < CONSTRUCTOR_NELTS (init))
5681 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5682 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5683 STRIP_NOPS (fn);
5685 else
5686 fn = NULL;
5688 /* For type inconsistent program we may end up looking up virtual method
5689 in virtual table that does not contain TOKEN entries.  We may overrun
5690 the virtual table and pick up a constant or RTTI info pointer.
5691 In any case the call is undefined.  */
5692 if (!fn
5693 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5694 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5695 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5696 else
5698 fn = TREE_OPERAND (fn, 0);
5700 /* When cgraph node is missing and function is not public, we cannot
5701 devirtualize.  This can happen in WHOPR when the actual method
5702 ends up in other partition, because we found devirtualization
5703 possibility too late.  */
5704 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5706 if (can_refer)
5708 *can_refer = false;
     /* Still return FN so the caller knows the target; NULL_TREE only
	when the caller did not ask for the referability flag.  */
5709 return fn;
5711 return NULL_TREE;
5715 /* Make sure we create a cgraph node for functions we'll reference.
5716 They can be non-existent if the reference comes from an entry
5717 of an external vtable for example.  */
5718 cgraph_node::get_create (fn);
5720 return fn;
5723 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5724 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5725 KNOWN_BINFO carries the binfo describing the true type of
5726 OBJ_TYPE_REF_OBJECT(REF).
5727 Set CAN_REFER if non-NULL to false if method
5728 is not referable or if the virtual table is ill-formed (such as rewriten
5729 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5731 tree
5732 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5733 bool *can_refer)
5735 unsigned HOST_WIDE_INT offset;
5736 tree v;
5738 v = BINFO_VTABLE (known_binfo);
5739 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5740 if (!v)
5741 return NULL_TREE;
5743 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5745 if (can_refer)
5746 *can_refer = false;
5747 return NULL_TREE;
5749 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5752 /* Return true iff VAL is a gimple expression that is known to be
5753 non-negative.  Restricted to floating-point inputs.  */
5755 bool
5756 gimple_val_nonnegative_real_p (tree val)
5758 gimple def_stmt;
5760 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
5762 /* Use existing logic for non-gimple trees.  */
5763 if (tree_expr_nonnegative_p (val))
5764 return true;
5766 if (TREE_CODE (val) != SSA_NAME)
5767 return false;
5769 /* Currently we look only at the immediately defining statement
5770 to make this determination, since recursion on defining
5771 statements of operands can lead to quadratic behavior in the
5772 worst case.  This is expected to catch almost all occurrences
5773 in practice.  It would be possible to implement limited-depth
5774 recursion if important cases are lost.  Alternatively, passes
5775 that need this information (such as the pow/powi lowering code
5776 in the cse_sincos pass) could be revised to provide it through
5777 dataflow propagation.  */
5779 def_stmt = SSA_NAME_DEF_STMT (val);
5781 if (is_gimple_assign (def_stmt))
5783 tree op0, op1;
5785 /* See fold-const.c:tree_expr_nonnegative_p for additional
5786 cases that could be handled with recursion.  */
5788 switch (gimple_assign_rhs_code (def_stmt))
5790 case ABS_EXPR:
5791 /* Always true for floating-point operands.  */
5792 return true;
5794 case MULT_EXPR:
5795 /* True if the two operands are identical (since we are
5796 restricted to floating-point inputs).  */
5797 op0 = gimple_assign_rhs1 (def_stmt);
5798 op1 = gimple_assign_rhs2 (def_stmt);
5800 if (op0 == op1
5801 || operand_equal_p (op0, op1, 0))
5802 return true;
	  /* Fall through to the default: x*y with x != y proves
	     nothing here.  */
5804 default:
5805 return false;
5808 else if (is_gimple_call (def_stmt))
5810 tree fndecl = gimple_call_fndecl (def_stmt);
5811 if (fndecl
5812 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5814 tree arg1;
5816 switch (DECL_FUNCTION_CODE (fndecl))
5818 CASE_FLT_FN (BUILT_IN_ACOS):
5819 CASE_FLT_FN (BUILT_IN_ACOSH):
5820 CASE_FLT_FN (BUILT_IN_CABS):
5821 CASE_FLT_FN (BUILT_IN_COSH):
5822 CASE_FLT_FN (BUILT_IN_ERFC):
5823 CASE_FLT_FN (BUILT_IN_EXP):
5824 CASE_FLT_FN (BUILT_IN_EXP10):
5825 CASE_FLT_FN (BUILT_IN_EXP2):
5826 CASE_FLT_FN (BUILT_IN_FABS):
5827 CASE_FLT_FN (BUILT_IN_FDIM):
5828 CASE_FLT_FN (BUILT_IN_HYPOT):
5829 CASE_FLT_FN (BUILT_IN_POW10):
5830 return true;
5832 CASE_FLT_FN (BUILT_IN_SQRT):
5833 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
5834 negative inputs.  */
5835 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
5836 return true;
5838 break;
5840 CASE_FLT_FN (BUILT_IN_POWI):
5841 /* True if the second argument is an even integer.  */
5842 arg1 = gimple_call_arg (def_stmt, 1);
5844 if (TREE_CODE (arg1) == INTEGER_CST
5845 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
5846 return true;
5848 break;
5850 CASE_FLT_FN (BUILT_IN_POW):
5851 /* True if the second argument is an even integer-valued
5852 real.  */
5853 arg1 = gimple_call_arg (def_stmt, 1);
5855 if (TREE_CODE (arg1) == REAL_CST)
5857 REAL_VALUE_TYPE c;
5858 HOST_WIDE_INT n;
5860 c = TREE_REAL_CST (arg1);
5861 n = real_to_integer (&c);
5863 if ((n & 1) == 0)
     /* Check the exponent is exactly the integer N, i.e. an even
	integer-valued real.  */
5865 REAL_VALUE_TYPE cint;
5866 real_from_integer (&cint, VOIDmode, n, SIGNED);
5867 if (real_identical (&c, &cint))
5868 return true;
5872 break;
5874 default:
5875 return false;
5880 return false;
5883 /* Given a pointer value OP0, return a simplified version of an
5884 indirection through OP0, or NULL_TREE if no simplification is
5885 possible.  Note that the resulting type may be different from
5886 the type pointed to in the sense that it is still compatible
5887 from the langhooks point of view.  */
5889 tree
5890 gimple_fold_indirect_ref (tree t)
5892 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5893 tree sub = t;
5894 tree subtype;
5896 STRIP_NOPS (sub);
5897 subtype = TREE_TYPE (sub);
5898 if (!POINTER_TYPE_P (subtype))
5899 return NULL_TREE;
5901 if (TREE_CODE (sub) == ADDR_EXPR)
5903 tree op = TREE_OPERAND (sub, 0);
5904 tree optype = TREE_TYPE (op);
5905 /* *&p => p */
5906 if (useless_type_conversion_p (type, optype))
5907 return op;
5909 /* *(foo *)&fooarray => fooarray[0] */
5910 if (TREE_CODE (optype) == ARRAY_TYPE
5911 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5912 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5914 tree type_domain = TYPE_DOMAIN (optype);
5915 tree min_val = size_zero_node;
5916 if (type_domain && TYPE_MIN_VALUE (type_domain))
5917 min_val = TYPE_MIN_VALUE (type_domain);
5918 if (TREE_CODE (min_val) == INTEGER_CST)
5919 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5921 /* *(foo *)&complexfoo => __real__ complexfoo */
5922 else if (TREE_CODE (optype) == COMPLEX_TYPE
5923 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5924 return fold_build1 (REALPART_EXPR, type, op);
5925 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5926 else if (TREE_CODE (optype) == VECTOR_TYPE
5927 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5929 tree part_width = TYPE_SIZE (type);
5930 tree index = bitsize_int (0);
5931 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5935 /* *(p + CST) -> ... */
5936 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5937 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5939 tree addr = TREE_OPERAND (sub, 0);
5940 tree off = TREE_OPERAND (sub, 1);
5941 tree addrtype;
5943 STRIP_NOPS (addr);
5944 addrtype = TREE_TYPE (addr);
5946 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5947 if (TREE_CODE (addr) == ADDR_EXPR
5948 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5949 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5950 && tree_fits_uhwi_p (off))
5952 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5953 tree part_width = TYPE_SIZE (type);
5954 unsigned HOST_WIDE_INT part_widthi
5955 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5956 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5957 tree index = bitsize_int (indexi);
	  /* Only fold when the selected element lies inside the vector.  */
5958 if (offset / part_widthi
5959 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5960 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5961 part_width, index);
5964 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5965 if (TREE_CODE (addr) == ADDR_EXPR
5966 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5967 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5969 tree size = TYPE_SIZE_UNIT (type);
5970 if (tree_int_cst_equal (size, off))
5971 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5974 /* *(p + CST) -> MEM_REF <p, CST>.  */
5975 if (TREE_CODE (addr) != ADDR_EXPR
5976 || DECL_P (TREE_OPERAND (addr, 0)))
5977 return fold_build2 (MEM_REF, type,
5978 addr,
5979 wide_int_to_tree (ptype, off));
5982 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5983 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5984 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5985 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5987 tree type_domain;
5988 tree min_val = size_zero_node;
5989 tree osub = sub;
     /* Recursively fold the inner indirection first.  */
5990 sub = gimple_fold_indirect_ref (sub);
5991 if (! sub)
5992 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5993 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5994 if (type_domain && TYPE_MIN_VALUE (type_domain))
5995 min_val = TYPE_MIN_VALUE (type_domain);
5996 if (TREE_CODE (min_val) == INTEGER_CST)
5997 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6000 return NULL_TREE;
6003 /* Return true if CODE is an operation that when operating on signed
6004 integer types involves undefined behavior on overflow and the
6005 operation can be expressed with unsigned arithmetic. */
6007 bool
6008 arith_code_with_undefined_signed_overflow (tree_code code)
6010 switch (code)
6012 case PLUS_EXPR:
6013 case MINUS_EXPR:
6014 case MULT_EXPR:
6015 case NEGATE_EXPR:
6016 case POINTER_PLUS_EXPR:
6017 return true;
6018 default:
6019 return false;
6023 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6024 operation that can be transformed to unsigned arithmetic by converting
6025 its operand, carrying out the operation in the corresponding unsigned
6026 type and converting the result back to the original type.
6028 Returns a sequence of statements that replace STMT and also contain
6029 a modified form of STMT itself.  */
6031 gimple_seq
6032 rewrite_to_defined_overflow (gimple stmt)
6034 if (dump_file && (dump_flags & TDF_DETAILS))
6036 fprintf (dump_file, "rewriting stmt with undefined signed "
6037 "overflow ")
6038 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6041 tree lhs = gimple_assign_lhs (stmt);
6042 tree type = unsigned_type_for (TREE_TYPE (lhs));
6043 gimple_seq stmts = NULL;
     /* Convert each RHS operand (op 0 is the LHS) to the unsigned type,
	collecting any statements the conversion needs.  */
6044 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6046 gimple_seq stmts2 = NULL;
6047 gimple_set_op (stmt, i,
6048 force_gimple_operand (fold_convert (type,
6049 gimple_op (stmt, i)),
6050 &stmts2, true, NULL_TREE));
6051 gimple_seq_add_seq (&stmts, stmts2);
     /* Give STMT a fresh unsigned LHS; the original LHS is assigned by the
	conversion statement appended below.  */
6053 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6054 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6055 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6056 gimple_seq_add_stmt (&stmts, stmt);
6057 gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6058 gimple_seq_add_stmt (&stmts, cvt);
6060 return stmts;
6064 /* Build the expression CODE OP0 of type TYPE with location LOC,
6065 simplifying it first if possible using VALUEIZE if not NULL.
6066 OP0 is expected to be valueized already. Returns the built
6067 expression value and appends statements possibly defining it
6068 to SEQ. */
6070 tree
6071 gimple_build (gimple_seq *seq, location_t loc,
6072 enum tree_code code, tree type, tree op0,
6073 tree (*valueize)(tree))
6075 tree res = gimple_simplify (code, type, op0, seq, valueize);
6076 if (!res)
6078 if (gimple_in_ssa_p (cfun))
6079 res = make_ssa_name (type);
6080 else
6081 res = create_tmp_reg (type);
6082 gimple stmt;
6083 if (code == REALPART_EXPR
6084 || code == IMAGPART_EXPR
6085 || code == VIEW_CONVERT_EXPR)
6086 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6087 else
6088 stmt = gimple_build_assign (res, code, op0);
6089 gimple_set_location (stmt, loc);
6090 gimple_seq_add_stmt_without_update (seq, stmt);
6092 return res;
6095 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6096 simplifying it first if possible using VALUEIZE if not NULL.
6097 OP0 and OP1 are expected to be valueized already. Returns the built
6098 expression value and appends statements possibly defining it
6099 to SEQ. */
6101 tree
6102 gimple_build (gimple_seq *seq, location_t loc,
6103 enum tree_code code, tree type, tree op0, tree op1,
6104 tree (*valueize)(tree))
6106 tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
6107 if (!res)
6109 if (gimple_in_ssa_p (cfun))
6110 res = make_ssa_name (type);
6111 else
6112 res = create_tmp_reg (type);
6113 gimple stmt = gimple_build_assign (res, code, op0, op1);
6114 gimple_set_location (stmt, loc);
6115 gimple_seq_add_stmt_without_update (seq, stmt);
6117 return res;
6120 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6121 simplifying it first if possible using VALUEIZE if not NULL.
6122 OP0, OP1 and OP2 are expected to be valueized already. Returns the built
6123 expression value and appends statements possibly defining it
6124 to SEQ. */
6126 tree
6127 gimple_build (gimple_seq *seq, location_t loc,
6128 enum tree_code code, tree type, tree op0, tree op1, tree op2,
6129 tree (*valueize)(tree))
6131 tree res = gimple_simplify (code, type, op0, op1, op2,
6132 seq, valueize);
6133 if (!res)
6135 if (gimple_in_ssa_p (cfun))
6136 res = make_ssa_name (type);
6137 else
6138 res = create_tmp_reg (type);
6139 gimple stmt;
6140 if (code == BIT_FIELD_REF)
6141 stmt = gimple_build_assign (res, code,
6142 build3 (code, type, op0, op1, op2));
6143 else
6144 stmt = gimple_build_assign (res, code, op0, op1, op2);
6145 gimple_set_location (stmt, loc);
6146 gimple_seq_add_stmt_without_update (seq, stmt);
6148 return res;
6151 /* Build the call FN (ARG0) with a result of type TYPE
6152 (or no result if TYPE is void) with location LOC,
6153 simplifying it first if possible using VALUEIZE if not NULL.
6154 ARG0 is expected to be valueized already. Returns the built
6155 expression value (or NULL_TREE if TYPE is void) and appends
6156 statements possibly defining it to SEQ. */
6158 tree
6159 gimple_build (gimple_seq *seq, location_t loc,
6160 enum built_in_function fn, tree type, tree arg0,
6161 tree (*valueize)(tree))
6163 tree res = gimple_simplify (fn, type, arg0, seq, valueize);
6164 if (!res)
6166 tree decl = builtin_decl_implicit (fn);
6167 gimple stmt = gimple_build_call (decl, 1, arg0);
6168 if (!VOID_TYPE_P (type))
6170 if (gimple_in_ssa_p (cfun))
6171 res = make_ssa_name (type);
6172 else
6173 res = create_tmp_reg (type);
6174 gimple_call_set_lhs (stmt, res);
6176 gimple_set_location (stmt, loc);
6177 gimple_seq_add_stmt_without_update (seq, stmt);
6179 return res;
6182 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6183 (or no result if TYPE is void) with location LOC,
6184 simplifying it first if possible using VALUEIZE if not NULL.
6185 ARG0 is expected to be valueized already. Returns the built
6186 expression value (or NULL_TREE if TYPE is void) and appends
6187 statements possibly defining it to SEQ. */
6189 tree
6190 gimple_build (gimple_seq *seq, location_t loc,
6191 enum built_in_function fn, tree type, tree arg0, tree arg1,
6192 tree (*valueize)(tree))
6194 tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
6195 if (!res)
6197 tree decl = builtin_decl_implicit (fn);
6198 gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
6199 if (!VOID_TYPE_P (type))
6201 if (gimple_in_ssa_p (cfun))
6202 res = make_ssa_name (type);
6203 else
6204 res = create_tmp_reg (type);
6205 gimple_call_set_lhs (stmt, res);
6207 gimple_set_location (stmt, loc);
6208 gimple_seq_add_stmt_without_update (seq, stmt);
6210 return res;
6213 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6214 (or no result if TYPE is void) with location LOC,
6215 simplifying it first if possible using VALUEIZE if not NULL.
6216 ARG0 is expected to be valueized already. Returns the built
6217 expression value (or NULL_TREE if TYPE is void) and appends
6218 statements possibly defining it to SEQ. */
6220 tree
6221 gimple_build (gimple_seq *seq, location_t loc,
6222 enum built_in_function fn, tree type,
6223 tree arg0, tree arg1, tree arg2,
6224 tree (*valueize)(tree))
6226 tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
6227 if (!res)
6229 tree decl = builtin_decl_implicit (fn);
6230 gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6231 if (!VOID_TYPE_P (type))
6233 if (gimple_in_ssa_p (cfun))
6234 res = make_ssa_name (type);
6235 else
6236 res = create_tmp_reg (type);
6237 gimple_call_set_lhs (stmt, res);
6239 gimple_set_location (stmt, loc);
6240 gimple_seq_add_stmt_without_update (seq, stmt);
6242 return res;
6245 /* Build the conversion (TYPE) OP with a result of type TYPE
6246 with location LOC if such conversion is neccesary in GIMPLE,
6247 simplifying it first.
6248 Returns the built expression value and appends
6249 statements possibly defining it to SEQ. */
6251 tree
6252 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6254 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6255 return op;
6256 return gimple_build (seq, loc, NOP_EXPR, type, op);