libgomp: Use pthread mutexes in the nvptx plugin.
[official-gcc.git] / gcc / gimple-fold.c
blob49b31f1deb3a1a6a873ba081d6e9ced10e91a47b
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "hash-set.h"
26 #include "machmode.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stringpool.h"
37 #include "expr.h"
38 #include "stmt.h"
39 #include "stor-layout.h"
40 #include "flags.h"
41 #include "hashtab.h"
42 #include "hash-set.h"
43 #include "vec.h"
44 #include "machmode.h"
45 #include "hard-reg-set.h"
46 #include "input.h"
47 #include "function.h"
48 #include "dumpfile.h"
49 #include "bitmap.h"
50 #include "predict.h"
51 #include "dominance.h"
52 #include "basic-block.h"
53 #include "tree-ssa-alias.h"
54 #include "internal-fn.h"
55 #include "gimple-fold.h"
56 #include "gimple-expr.h"
57 #include "is-a.h"
58 #include "gimple.h"
59 #include "gimplify.h"
60 #include "gimple-iterator.h"
61 #include "gimple-ssa.h"
62 #include "tree-ssanames.h"
63 #include "tree-into-ssa.h"
64 #include "tree-dfa.h"
65 #include "tree-ssa.h"
66 #include "tree-ssa-propagate.h"
67 #include "target.h"
68 #include "hash-map.h"
69 #include "plugin-api.h"
70 #include "ipa-ref.h"
71 #include "cgraph.h"
72 #include "ipa-utils.h"
73 #include "gimple-pretty-print.h"
74 #include "tree-ssa-address.h"
75 #include "langhooks.h"
76 #include "gimplify-me.h"
77 #include "dbgcnt.h"
78 #include "builtins.h"
79 #include "output.h"
80 #include "tree-eh.h"
81 #include "gimple-match.h"
82 #include "tree-phinodes.h"
83 #include "ssa-iterators.h"
85 /* Return true when DECL can be referenced from current unit.
86 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
87 We can get declarations that are not possible to reference for various
88 reasons:
90 1) When analyzing C++ virtual tables.
91 C++ virtual tables do have known constructors even
92 when they are keyed to other compilation unit.
93 Those tables can contain pointers to methods and vars
94 in other units. Those methods have both STATIC and EXTERNAL
95 set.
96 2) In WHOPR mode devirtualization might lead to reference
97 to method that was partitioned elsehwere.
98 In this case we have static VAR_DECL or FUNCTION_DECL
99 that has no corresponding callgraph/varpool node
100 declaring the body.
101 3) COMDAT functions referred by external vtables that
102 we devirtualize only during final compilation stage.
103 At this time we already decided that we will not output
104 the function body and thus we can't reference the symbol
105 directly. */
107 static bool
108 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
110 varpool_node *vnode;
111 struct cgraph_node *node;
112 symtab_node *snode;
114 if (DECL_ABSTRACT_P (decl))
115 return false;
117 /* We are concerned only about static/external vars and functions. */
118 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
119 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
120 return true;
122 /* Static objects can be referred only if they was not optimized out yet. */
123 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
125 /* Before we start optimizing unreachable code we can be sure all
126 static objects are defined. */
127 if (symtab->function_flags_ready)
128 return true;
129 snode = symtab_node::get (decl);
130 if (!snode || !snode->definition)
131 return false;
132 node = dyn_cast <cgraph_node *> (snode);
133 return !node || !node->global.inlined_to;
136 /* We will later output the initializer, so we can refer to it.
137 So we are concerned only when DECL comes from initializer of
138 external var or var that has been optimized out. */
139 if (!from_decl
140 || TREE_CODE (from_decl) != VAR_DECL
141 || (!DECL_EXTERNAL (from_decl)
142 && (vnode = varpool_node::get (from_decl)) != NULL
143 && vnode->definition)
144 || (flag_ltrans
145 && (vnode = varpool_node::get (from_decl)) != NULL
146 && vnode->in_other_partition))
147 return true;
148 /* We are folding reference from external vtable. The vtable may reffer
149 to a symbol keyed to other compilation unit. The other compilation
150 unit may be in separate DSO and the symbol may be hidden. */
151 if (DECL_VISIBILITY_SPECIFIED (decl)
152 && DECL_EXTERNAL (decl)
153 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
154 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
155 return false;
156 /* When function is public, we always can introduce new reference.
157 Exception are the COMDAT functions where introducing a direct
158 reference imply need to include function body in the curren tunit. */
159 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
160 return true;
161 /* We have COMDAT. We are going to check if we still have definition
162 or if the definition is going to be output in other partition.
163 Bypass this when gimplifying; all needed functions will be produced.
165 As observed in PR20991 for already optimized out comdat virtual functions
166 it may be tempting to not necessarily give up because the copy will be
167 output elsewhere when corresponding vtable is output.
168 This is however not possible - ABI specify that COMDATs are output in
169 units where they are used and when the other unit was compiled with LTO
170 it is possible that vtable was kept public while the function itself
171 was privatized. */
172 if (!symtab->function_flags_ready)
173 return true;
175 snode = symtab_node::get (decl);
176 if (!snode
177 || ((!snode->definition || DECL_EXTERNAL (decl))
178 && (!snode->in_other_partition
179 || (!snode->forced_by_abi && !snode->force_output))))
180 return false;
181 node = dyn_cast <cgraph_node *> (snode);
182 return !node || !node->global.inlined_to;
185 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
186 acceptable form for is_gimple_min_invariant.
187 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
189 tree
190 canonicalize_constructor_val (tree cval, tree from_decl)
192 tree orig_cval = cval;
193 STRIP_NOPS (cval);
194 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
195 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
197 tree ptr = TREE_OPERAND (cval, 0);
198 if (is_gimple_min_invariant (ptr))
199 cval = build1_loc (EXPR_LOCATION (cval),
200 ADDR_EXPR, TREE_TYPE (ptr),
201 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
202 ptr,
203 fold_convert (ptr_type_node,
204 TREE_OPERAND (cval, 1))));
206 if (TREE_CODE (cval) == ADDR_EXPR)
208 tree base = NULL_TREE;
209 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
211 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
212 if (base)
213 TREE_OPERAND (cval, 0) = base;
215 else
216 base = get_base_address (TREE_OPERAND (cval, 0));
217 if (!base)
218 return NULL_TREE;
220 if ((TREE_CODE (base) == VAR_DECL
221 || TREE_CODE (base) == FUNCTION_DECL)
222 && !can_refer_decl_in_current_unit_p (base, from_decl))
223 return NULL_TREE;
224 if (TREE_CODE (base) == VAR_DECL)
225 TREE_ADDRESSABLE (base) = 1;
226 else if (TREE_CODE (base) == FUNCTION_DECL)
228 /* Make sure we create a cgraph node for functions we'll reference.
229 They can be non-existent if the reference comes from an entry
230 of an external vtable for example. */
231 cgraph_node::get_create (base);
233 /* Fixup types in global initializers. */
234 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
235 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
237 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
238 cval = fold_convert (TREE_TYPE (orig_cval), cval);
239 return cval;
241 if (TREE_OVERFLOW_P (cval))
242 return drop_tree_overflow (cval);
243 return orig_cval;
246 /* If SYM is a constant variable with known value, return the value.
247 NULL_TREE is returned otherwise. */
249 tree
250 get_symbol_constant_value (tree sym)
252 tree val = ctor_for_folding (sym);
253 if (val != error_mark_node)
255 if (val)
257 val = canonicalize_constructor_val (unshare_expr (val), sym);
258 if (val && is_gimple_min_invariant (val))
259 return val;
260 else
261 return NULL_TREE;
263 /* Variables declared 'const' without an initializer
264 have zero as the initializer if they may not be
265 overridden at link or run time. */
266 if (!val
267 && is_gimple_reg_type (TREE_TYPE (sym)))
268 return build_zero_cst (TREE_TYPE (sym));
271 return NULL_TREE;
276 /* Subroutine of fold_stmt. We perform several simplifications of the
277 memory reference tree EXPR and make sure to re-gimplify them properly
278 after propagation of constant addresses. IS_LHS is true if the
279 reference is supposed to be an lvalue. */
281 static tree
282 maybe_fold_reference (tree expr, bool is_lhs)
284 tree result;
286 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
287 || TREE_CODE (expr) == REALPART_EXPR
288 || TREE_CODE (expr) == IMAGPART_EXPR)
289 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
290 return fold_unary_loc (EXPR_LOCATION (expr),
291 TREE_CODE (expr),
292 TREE_TYPE (expr),
293 TREE_OPERAND (expr, 0));
294 else if (TREE_CODE (expr) == BIT_FIELD_REF
295 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
296 return fold_ternary_loc (EXPR_LOCATION (expr),
297 TREE_CODE (expr),
298 TREE_TYPE (expr),
299 TREE_OPERAND (expr, 0),
300 TREE_OPERAND (expr, 1),
301 TREE_OPERAND (expr, 2));
303 if (!is_lhs
304 && (result = fold_const_aggregate_ref (expr))
305 && is_gimple_min_invariant (result))
306 return result;
308 return NULL_TREE;
312 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
313 replacement rhs for the statement or NULL_TREE if no simplification
314 could be made. It is assumed that the operands have been previously
315 folded. */
317 static tree
318 fold_gimple_assign (gimple_stmt_iterator *si)
320 gimple stmt = gsi_stmt (*si);
321 enum tree_code subcode = gimple_assign_rhs_code (stmt);
322 location_t loc = gimple_location (stmt);
324 tree result = NULL_TREE;
326 switch (get_gimple_rhs_class (subcode))
328 case GIMPLE_SINGLE_RHS:
330 tree rhs = gimple_assign_rhs1 (stmt);
332 if (TREE_CLOBBER_P (rhs))
333 return NULL_TREE;
335 if (REFERENCE_CLASS_P (rhs))
336 return maybe_fold_reference (rhs, false);
338 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
340 tree val = OBJ_TYPE_REF_EXPR (rhs);
341 if (is_gimple_min_invariant (val))
342 return val;
343 else if (flag_devirtualize && virtual_method_call_p (rhs))
345 bool final;
346 vec <cgraph_node *>targets
347 = possible_polymorphic_call_targets (rhs, stmt, &final);
348 if (final && targets.length () <= 1 && dbg_cnt (devirt))
350 if (dump_enabled_p ())
352 location_t loc = gimple_location_safe (stmt);
353 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
354 "resolving virtual function address "
355 "reference to function %s\n",
356 targets.length () == 1
357 ? targets[0]->name ()
358 : "NULL");
360 if (targets.length () == 1)
362 val = fold_convert (TREE_TYPE (val),
363 build_fold_addr_expr_loc
364 (loc, targets[0]->decl));
365 STRIP_USELESS_TYPE_CONVERSION (val);
367 else
368 /* We can not use __builtin_unreachable here because it
369 can not have address taken. */
370 val = build_int_cst (TREE_TYPE (val), 0);
371 return val;
376 else if (TREE_CODE (rhs) == ADDR_EXPR)
378 tree ref = TREE_OPERAND (rhs, 0);
379 tree tem = maybe_fold_reference (ref, true);
380 if (tem
381 && TREE_CODE (tem) == MEM_REF
382 && integer_zerop (TREE_OPERAND (tem, 1)))
383 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
384 else if (tem)
385 result = fold_convert (TREE_TYPE (rhs),
386 build_fold_addr_expr_loc (loc, tem));
387 else if (TREE_CODE (ref) == MEM_REF
388 && integer_zerop (TREE_OPERAND (ref, 1)))
389 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
392 else if (TREE_CODE (rhs) == CONSTRUCTOR
393 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
394 && (CONSTRUCTOR_NELTS (rhs)
395 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
397 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
398 unsigned i;
399 tree val;
401 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
402 if (TREE_CODE (val) != INTEGER_CST
403 && TREE_CODE (val) != REAL_CST
404 && TREE_CODE (val) != FIXED_CST)
405 return NULL_TREE;
407 return build_vector_from_ctor (TREE_TYPE (rhs),
408 CONSTRUCTOR_ELTS (rhs));
411 else if (DECL_P (rhs))
412 return get_symbol_constant_value (rhs);
414 /* If we couldn't fold the RHS, hand over to the generic
415 fold routines. */
416 if (result == NULL_TREE)
417 result = fold (rhs);
419 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
420 that may have been added by fold, and "useless" type
421 conversions that might now be apparent due to propagation. */
422 STRIP_USELESS_TYPE_CONVERSION (result);
424 if (result != rhs && valid_gimple_rhs_p (result))
425 return result;
427 return NULL_TREE;
429 break;
431 case GIMPLE_UNARY_RHS:
432 break;
434 case GIMPLE_BINARY_RHS:
435 /* Try to canonicalize for boolean-typed X the comparisons
436 X == 0, X == 1, X != 0, and X != 1. */
437 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
438 || gimple_assign_rhs_code (stmt) == NE_EXPR)
440 tree lhs = gimple_assign_lhs (stmt);
441 tree op1 = gimple_assign_rhs1 (stmt);
442 tree op2 = gimple_assign_rhs2 (stmt);
443 tree type = TREE_TYPE (op1);
445 /* Check whether the comparison operands are of the same boolean
446 type as the result type is.
447 Check that second operand is an integer-constant with value
448 one or zero. */
449 if (TREE_CODE (op2) == INTEGER_CST
450 && (integer_zerop (op2) || integer_onep (op2))
451 && useless_type_conversion_p (TREE_TYPE (lhs), type))
453 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
454 bool is_logical_not = false;
456 /* X == 0 and X != 1 is a logical-not.of X
457 X == 1 and X != 0 is X */
458 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
459 || (cmp_code == NE_EXPR && integer_onep (op2)))
460 is_logical_not = true;
462 if (is_logical_not == false)
463 result = op1;
464 /* Only for one-bit precision typed X the transformation
465 !X -> ~X is valied. */
466 else if (TYPE_PRECISION (type) == 1)
467 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
468 type, op1);
469 /* Otherwise we use !X -> X ^ 1. */
470 else
471 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
472 type, op1, build_int_cst (type, 1));
477 if (!result)
478 result = fold_binary_loc (loc, subcode,
479 TREE_TYPE (gimple_assign_lhs (stmt)),
480 gimple_assign_rhs1 (stmt),
481 gimple_assign_rhs2 (stmt));
483 if (result)
485 STRIP_USELESS_TYPE_CONVERSION (result);
486 if (valid_gimple_rhs_p (result))
487 return result;
489 break;
491 case GIMPLE_TERNARY_RHS:
492 /* Try to fold a conditional expression. */
493 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
495 tree op0 = gimple_assign_rhs1 (stmt);
496 tree tem;
497 bool set = false;
498 location_t cond_loc = gimple_location (stmt);
500 if (COMPARISON_CLASS_P (op0))
502 fold_defer_overflow_warnings ();
503 tem = fold_binary_loc (cond_loc,
504 TREE_CODE (op0), TREE_TYPE (op0),
505 TREE_OPERAND (op0, 0),
506 TREE_OPERAND (op0, 1));
507 /* This is actually a conditional expression, not a GIMPLE
508 conditional statement, however, the valid_gimple_rhs_p
509 test still applies. */
510 set = (tem && is_gimple_condexpr (tem)
511 && valid_gimple_rhs_p (tem));
512 fold_undefer_overflow_warnings (set, stmt, 0);
514 else if (is_gimple_min_invariant (op0))
516 tem = op0;
517 set = true;
519 else
520 return NULL_TREE;
522 if (set)
523 result = fold_build3_loc (cond_loc, COND_EXPR,
524 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
525 gimple_assign_rhs2 (stmt),
526 gimple_assign_rhs3 (stmt));
529 if (!result)
530 result = fold_ternary_loc (loc, subcode,
531 TREE_TYPE (gimple_assign_lhs (stmt)),
532 gimple_assign_rhs1 (stmt),
533 gimple_assign_rhs2 (stmt),
534 gimple_assign_rhs3 (stmt));
536 if (result)
538 STRIP_USELESS_TYPE_CONVERSION (result);
539 if (valid_gimple_rhs_p (result))
540 return result;
542 break;
544 case GIMPLE_INVALID_RHS:
545 gcc_unreachable ();
548 return NULL_TREE;
551 /* Attempt to fold a conditional statement. Return true if any changes were
552 made. We only attempt to fold the condition expression, and do not perform
553 any transformation that would require alteration of the cfg. It is
554 assumed that the operands have been previously folded. */
556 static bool
557 fold_gimple_cond (gcond *stmt)
559 tree result = fold_binary_loc (gimple_location (stmt),
560 gimple_cond_code (stmt),
561 boolean_type_node,
562 gimple_cond_lhs (stmt),
563 gimple_cond_rhs (stmt));
565 if (result)
567 STRIP_USELESS_TYPE_CONVERSION (result);
568 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
570 gimple_cond_set_condition_from_tree (stmt, result);
571 return true;
575 return false;
579 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
580 adjusting the replacement stmts location and virtual operands.
581 If the statement has a lhs the last stmt in the sequence is expected
582 to assign to that lhs. */
584 static void
585 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
587 gimple stmt = gsi_stmt (*si_p);
589 if (gimple_has_location (stmt))
590 annotate_all_with_location (stmts, gimple_location (stmt));
592 /* First iterate over the replacement statements backward, assigning
593 virtual operands to their defining statements. */
594 gimple laststore = NULL;
595 for (gimple_stmt_iterator i = gsi_last (stmts);
596 !gsi_end_p (i); gsi_prev (&i))
598 gimple new_stmt = gsi_stmt (i);
599 if ((gimple_assign_single_p (new_stmt)
600 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
601 || (is_gimple_call (new_stmt)
602 && (gimple_call_flags (new_stmt)
603 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
605 tree vdef;
606 if (!laststore)
607 vdef = gimple_vdef (stmt);
608 else
609 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
610 gimple_set_vdef (new_stmt, vdef);
611 if (vdef && TREE_CODE (vdef) == SSA_NAME)
612 SSA_NAME_DEF_STMT (vdef) = new_stmt;
613 laststore = new_stmt;
617 /* Second iterate over the statements forward, assigning virtual
618 operands to their uses. */
619 tree reaching_vuse = gimple_vuse (stmt);
620 for (gimple_stmt_iterator i = gsi_start (stmts);
621 !gsi_end_p (i); gsi_next (&i))
623 gimple new_stmt = gsi_stmt (i);
624 /* If the new statement possibly has a VUSE, update it with exact SSA
625 name we know will reach this one. */
626 if (gimple_has_mem_ops (new_stmt))
627 gimple_set_vuse (new_stmt, reaching_vuse);
628 gimple_set_modified (new_stmt, true);
629 if (gimple_vdef (new_stmt))
630 reaching_vuse = gimple_vdef (new_stmt);
633 /* If the new sequence does not do a store release the virtual
634 definition of the original statement. */
635 if (reaching_vuse
636 && reaching_vuse == gimple_vuse (stmt))
638 tree vdef = gimple_vdef (stmt);
639 if (vdef
640 && TREE_CODE (vdef) == SSA_NAME)
642 unlink_stmt_vdef (stmt);
643 release_ssa_name (vdef);
647 /* Finally replace the original statement with the sequence. */
648 gsi_replace_with_seq (si_p, stmts, false);
651 /* Convert EXPR into a GIMPLE value suitable for substitution on the
652 RHS of an assignment. Insert the necessary statements before
653 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
654 is replaced. If the call is expected to produces a result, then it
655 is replaced by an assignment of the new RHS to the result variable.
656 If the result is to be ignored, then the call is replaced by a
657 GIMPLE_NOP. A proper VDEF chain is retained by making the first
658 VUSE and the last VDEF of the whole sequence be the same as the replaced
659 statement and using new SSA names for stores in between. */
661 void
662 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
664 tree lhs;
665 gimple stmt, new_stmt;
666 gimple_stmt_iterator i;
667 gimple_seq stmts = NULL;
669 stmt = gsi_stmt (*si_p);
671 gcc_assert (is_gimple_call (stmt));
673 push_gimplify_context (gimple_in_ssa_p (cfun));
675 lhs = gimple_call_lhs (stmt);
676 if (lhs == NULL_TREE)
678 gimplify_and_add (expr, &stmts);
679 /* We can end up with folding a memcpy of an empty class assignment
680 which gets optimized away by C++ gimplification. */
681 if (gimple_seq_empty_p (stmts))
683 pop_gimplify_context (NULL);
684 if (gimple_in_ssa_p (cfun))
686 unlink_stmt_vdef (stmt);
687 release_defs (stmt);
689 gsi_replace (si_p, gimple_build_nop (), true);
690 return;
693 else
695 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
696 new_stmt = gimple_build_assign (lhs, tmp);
697 i = gsi_last (stmts);
698 gsi_insert_after_without_update (&i, new_stmt,
699 GSI_CONTINUE_LINKING);
702 pop_gimplify_context (NULL);
704 gsi_replace_with_seq_vops (si_p, stmts);
708 /* Replace the call at *GSI with the gimple value VAL. */
710 static void
711 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
713 gimple stmt = gsi_stmt (*gsi);
714 tree lhs = gimple_call_lhs (stmt);
715 gimple repl;
716 if (lhs)
718 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
719 val = fold_convert (TREE_TYPE (lhs), val);
720 repl = gimple_build_assign (lhs, val);
722 else
723 repl = gimple_build_nop ();
724 tree vdef = gimple_vdef (stmt);
725 if (vdef && TREE_CODE (vdef) == SSA_NAME)
727 unlink_stmt_vdef (stmt);
728 release_ssa_name (vdef);
730 gsi_replace (gsi, repl, true);
733 /* Replace the call at *GSI with the new call REPL and fold that
734 again. */
736 static void
737 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
739 gimple stmt = gsi_stmt (*gsi);
740 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
741 gimple_set_location (repl, gimple_location (stmt));
742 if (gimple_vdef (stmt)
743 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
745 gimple_set_vdef (repl, gimple_vdef (stmt));
746 gimple_set_vuse (repl, gimple_vuse (stmt));
747 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
749 gsi_replace (gsi, repl, true);
750 fold_stmt (gsi);
753 /* Return true if VAR is a VAR_DECL or a component thereof. */
755 static bool
756 var_decl_component_p (tree var)
758 tree inner = var;
759 while (handled_component_p (inner))
760 inner = TREE_OPERAND (inner, 0);
761 return SSA_VAR_P (inner);
764 /* Fold function call to builtin mem{{,p}cpy,move}. Return
765 NULL_TREE if no simplification can be made.
766 If ENDP is 0, return DEST (like memcpy).
767 If ENDP is 1, return DEST+LEN (like mempcpy).
768 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
769 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
770 (memmove). */
772 static bool
773 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
774 tree dest, tree src, int endp)
776 gimple stmt = gsi_stmt (*gsi);
777 tree lhs = gimple_call_lhs (stmt);
778 tree len = gimple_call_arg (stmt, 2);
779 tree destvar, srcvar;
780 location_t loc = gimple_location (stmt);
782 /* If the LEN parameter is zero, return DEST. */
783 if (integer_zerop (len))
785 gimple repl;
786 if (gimple_call_lhs (stmt))
787 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
788 else
789 repl = gimple_build_nop ();
790 tree vdef = gimple_vdef (stmt);
791 if (vdef && TREE_CODE (vdef) == SSA_NAME)
793 unlink_stmt_vdef (stmt);
794 release_ssa_name (vdef);
796 gsi_replace (gsi, repl, true);
797 return true;
800 /* If SRC and DEST are the same (and not volatile), return
801 DEST{,+LEN,+LEN-1}. */
802 if (operand_equal_p (src, dest, 0))
804 unlink_stmt_vdef (stmt);
805 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
806 release_ssa_name (gimple_vdef (stmt));
807 if (!lhs)
809 gsi_replace (gsi, gimple_build_nop (), true);
810 return true;
812 goto done;
814 else
816 tree srctype, desttype;
817 unsigned int src_align, dest_align;
818 tree off0;
820 /* Build accesses at offset zero with a ref-all character type. */
821 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
822 ptr_mode, true), 0);
824 /* If we can perform the copy efficiently with first doing all loads
825 and then all stores inline it that way. Currently efficiently
826 means that we can load all the memory into a single integer
827 register which is what MOVE_MAX gives us. */
828 src_align = get_pointer_alignment (src);
829 dest_align = get_pointer_alignment (dest);
830 if (tree_fits_uhwi_p (len)
831 && compare_tree_int (len, MOVE_MAX) <= 0
832 /* ??? Don't transform copies from strings with known length this
833 confuses the tree-ssa-strlen.c. This doesn't handle
834 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
835 reason. */
836 && !c_strlen (src, 2))
838 unsigned ilen = tree_to_uhwi (len);
839 if (exact_log2 (ilen) != -1)
841 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
842 if (type
843 && TYPE_MODE (type) != BLKmode
844 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
845 == ilen * 8)
846 /* If the destination pointer is not aligned we must be able
847 to emit an unaligned store. */
848 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
849 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
851 tree srctype = type;
852 tree desttype = type;
853 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
854 srctype = build_aligned_type (type, src_align);
855 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
856 tree tem = fold_const_aggregate_ref (srcmem);
857 if (tem)
858 srcmem = tem;
859 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
860 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
861 src_align))
862 srcmem = NULL_TREE;
863 if (srcmem)
865 gimple new_stmt;
866 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
868 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
869 if (gimple_in_ssa_p (cfun))
870 srcmem = make_ssa_name (TREE_TYPE (srcmem),
871 new_stmt);
872 else
873 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
874 gimple_assign_set_lhs (new_stmt, srcmem);
875 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
876 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
878 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
879 desttype = build_aligned_type (type, dest_align);
880 new_stmt
881 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
882 dest, off0),
883 srcmem);
884 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
885 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
886 if (gimple_vdef (new_stmt)
887 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
888 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
889 if (!lhs)
891 gsi_replace (gsi, new_stmt, true);
892 return true;
894 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
895 goto done;
901 if (endp == 3)
903 /* Both DEST and SRC must be pointer types.
904 ??? This is what old code did. Is the testing for pointer types
905 really mandatory?
907 If either SRC is readonly or length is 1, we can use memcpy. */
908 if (!dest_align || !src_align)
909 return false;
910 if (readonly_data_expr (src)
911 || (tree_fits_uhwi_p (len)
912 && (MIN (src_align, dest_align) / BITS_PER_UNIT
913 >= tree_to_uhwi (len))))
915 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
916 if (!fn)
917 return false;
918 gimple_call_set_fndecl (stmt, fn);
919 gimple_call_set_arg (stmt, 0, dest);
920 gimple_call_set_arg (stmt, 1, src);
921 fold_stmt (gsi);
922 return true;
925 /* If *src and *dest can't overlap, optimize into memcpy as well. */
926 if (TREE_CODE (src) == ADDR_EXPR
927 && TREE_CODE (dest) == ADDR_EXPR)
929 tree src_base, dest_base, fn;
930 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
931 HOST_WIDE_INT size = -1;
932 HOST_WIDE_INT maxsize = -1;
934 srcvar = TREE_OPERAND (src, 0);
935 src_base = get_ref_base_and_extent (srcvar, &src_offset,
936 &size, &maxsize);
937 destvar = TREE_OPERAND (dest, 0);
938 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
939 &size, &maxsize);
940 if (tree_fits_uhwi_p (len))
941 maxsize = tree_to_uhwi (len);
942 else
943 maxsize = -1;
944 src_offset /= BITS_PER_UNIT;
945 dest_offset /= BITS_PER_UNIT;
946 if (SSA_VAR_P (src_base)
947 && SSA_VAR_P (dest_base))
949 if (operand_equal_p (src_base, dest_base, 0)
950 && ranges_overlap_p (src_offset, maxsize,
951 dest_offset, maxsize))
952 return false;
954 else if (TREE_CODE (src_base) == MEM_REF
955 && TREE_CODE (dest_base) == MEM_REF)
957 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
958 TREE_OPERAND (dest_base, 0), 0))
959 return false;
960 offset_int off = mem_ref_offset (src_base) + src_offset;
961 if (!wi::fits_shwi_p (off))
962 return false;
963 src_offset = off.to_shwi ();
965 off = mem_ref_offset (dest_base) + dest_offset;
966 if (!wi::fits_shwi_p (off))
967 return false;
968 dest_offset = off.to_shwi ();
969 if (ranges_overlap_p (src_offset, maxsize,
970 dest_offset, maxsize))
971 return false;
973 else
974 return false;
976 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
977 if (!fn)
978 return false;
979 gimple_call_set_fndecl (stmt, fn);
980 gimple_call_set_arg (stmt, 0, dest);
981 gimple_call_set_arg (stmt, 1, src);
982 fold_stmt (gsi);
983 return true;
986 /* If the destination and source do not alias optimize into
987 memcpy as well. */
988 if ((is_gimple_min_invariant (dest)
989 || TREE_CODE (dest) == SSA_NAME)
990 && (is_gimple_min_invariant (src)
991 || TREE_CODE (src) == SSA_NAME))
993 ao_ref destr, srcr;
994 ao_ref_init_from_ptr_and_size (&destr, dest, len);
995 ao_ref_init_from_ptr_and_size (&srcr, src, len);
996 if (!refs_may_alias_p_1 (&destr, &srcr, false))
998 tree fn;
999 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1000 if (!fn)
1001 return false;
1002 gimple_call_set_fndecl (stmt, fn);
1003 gimple_call_set_arg (stmt, 0, dest);
1004 gimple_call_set_arg (stmt, 1, src);
1005 fold_stmt (gsi);
1006 return true;
1010 return false;
1013 if (!tree_fits_shwi_p (len))
1014 return false;
1015 /* FIXME:
1016 This logic lose for arguments like (type *)malloc (sizeof (type)),
1017 since we strip the casts of up to VOID return value from malloc.
1018 Perhaps we ought to inherit type from non-VOID argument here? */
1019 STRIP_NOPS (src);
1020 STRIP_NOPS (dest);
1021 if (!POINTER_TYPE_P (TREE_TYPE (src))
1022 || !POINTER_TYPE_P (TREE_TYPE (dest)))
1023 return false;
1024 /* In the following try to find a type that is most natural to be
1025 used for the memcpy source and destination and that allows
1026 the most optimization when memcpy is turned into a plain assignment
1027 using that type. In theory we could always use a char[len] type
1028 but that only gains us that the destination and source possibly
1029 no longer will have their address taken. */
1030 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1031 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1033 tree tem = TREE_OPERAND (src, 0);
1034 STRIP_NOPS (tem);
1035 if (tem != TREE_OPERAND (src, 0))
1036 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1038 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1040 tree tem = TREE_OPERAND (dest, 0);
1041 STRIP_NOPS (tem);
1042 if (tem != TREE_OPERAND (dest, 0))
1043 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1045 srctype = TREE_TYPE (TREE_TYPE (src));
1046 if (TREE_CODE (srctype) == ARRAY_TYPE
1047 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1049 srctype = TREE_TYPE (srctype);
1050 STRIP_NOPS (src);
1051 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1053 desttype = TREE_TYPE (TREE_TYPE (dest));
1054 if (TREE_CODE (desttype) == ARRAY_TYPE
1055 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1057 desttype = TREE_TYPE (desttype);
1058 STRIP_NOPS (dest);
1059 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1061 if (TREE_ADDRESSABLE (srctype)
1062 || TREE_ADDRESSABLE (desttype))
1063 return false;
1065 /* Make sure we are not copying using a floating-point mode or
1066 a type whose size possibly does not match its precision. */
1067 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1068 || TREE_CODE (desttype) == BOOLEAN_TYPE
1069 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1070 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1071 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1072 || TREE_CODE (srctype) == BOOLEAN_TYPE
1073 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1074 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1075 if (!srctype)
1076 srctype = desttype;
1077 if (!desttype)
1078 desttype = srctype;
1079 if (!srctype)
1080 return false;
1082 src_align = get_pointer_alignment (src);
1083 dest_align = get_pointer_alignment (dest);
1084 if (dest_align < TYPE_ALIGN (desttype)
1085 || src_align < TYPE_ALIGN (srctype))
1086 return false;
1088 destvar = dest;
1089 STRIP_NOPS (destvar);
1090 if (TREE_CODE (destvar) == ADDR_EXPR
1091 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1092 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1093 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1094 else
1095 destvar = NULL_TREE;
1097 srcvar = src;
1098 STRIP_NOPS (srcvar);
1099 if (TREE_CODE (srcvar) == ADDR_EXPR
1100 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1101 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1103 if (!destvar
1104 || src_align >= TYPE_ALIGN (desttype))
1105 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1106 srcvar, off0);
1107 else if (!STRICT_ALIGNMENT)
1109 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1110 src_align);
1111 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1113 else
1114 srcvar = NULL_TREE;
1116 else
1117 srcvar = NULL_TREE;
1119 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1120 return false;
1122 if (srcvar == NULL_TREE)
1124 STRIP_NOPS (src);
1125 if (src_align >= TYPE_ALIGN (desttype))
1126 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1127 else
1129 if (STRICT_ALIGNMENT)
1130 return false;
1131 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1132 src_align);
1133 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1136 else if (destvar == NULL_TREE)
1138 STRIP_NOPS (dest);
1139 if (dest_align >= TYPE_ALIGN (srctype))
1140 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1141 else
1143 if (STRICT_ALIGNMENT)
1144 return false;
1145 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1146 dest_align);
1147 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1151 gimple new_stmt;
1152 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1154 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1155 if (gimple_in_ssa_p (cfun))
1156 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1157 else
1158 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1159 gimple_assign_set_lhs (new_stmt, srcvar);
1160 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1161 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1163 new_stmt = gimple_build_assign (destvar, srcvar);
1164 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1165 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1166 if (gimple_vdef (new_stmt)
1167 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1168 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1169 if (!lhs)
1171 gsi_replace (gsi, new_stmt, true);
1172 return true;
1174 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1177 done:
1178 if (endp == 0 || endp == 3)
1179 len = NULL_TREE;
1180 else if (endp == 2)
1181 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1182 ssize_int (1));
1183 if (endp == 2 || endp == 1)
1184 dest = fold_build_pointer_plus_loc (loc, dest, len);
1186 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1187 GSI_SAME_STMT);
1188 gimple repl = gimple_build_assign (lhs, dest);
1189 gsi_replace (gsi, repl, true);
1190 return true;
1193 /* Fold function call to builtin memset or bzero at *GSI setting the
1194 memory of size LEN to VAL. Return whether a simplification was made. */
1196 static bool
1197 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1199 gimple stmt = gsi_stmt (*gsi);
1200 tree etype;
1201 unsigned HOST_WIDE_INT length, cval;
1203 /* If the LEN parameter is zero, return DEST. */
1204 if (integer_zerop (len))
1206 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1207 return true;
1210 if (! tree_fits_uhwi_p (len))
1211 return false;
1213 if (TREE_CODE (c) != INTEGER_CST)
1214 return false;
1216 tree dest = gimple_call_arg (stmt, 0);
1217 tree var = dest;
1218 if (TREE_CODE (var) != ADDR_EXPR)
1219 return false;
1221 var = TREE_OPERAND (var, 0);
1222 if (TREE_THIS_VOLATILE (var))
1223 return false;
1225 etype = TREE_TYPE (var);
1226 if (TREE_CODE (etype) == ARRAY_TYPE)
1227 etype = TREE_TYPE (etype);
1229 if (!INTEGRAL_TYPE_P (etype)
1230 && !POINTER_TYPE_P (etype))
1231 return NULL_TREE;
1233 if (! var_decl_component_p (var))
1234 return NULL_TREE;
1236 length = tree_to_uhwi (len);
1237 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1238 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1239 return NULL_TREE;
1241 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1242 return NULL_TREE;
1244 if (integer_zerop (c))
1245 cval = 0;
1246 else
1248 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1249 return NULL_TREE;
1251 cval = TREE_INT_CST_LOW (c);
1252 cval &= 0xff;
1253 cval |= cval << 8;
1254 cval |= cval << 16;
1255 cval |= (cval << 31) << 1;
1258 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1259 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1260 gimple_set_vuse (store, gimple_vuse (stmt));
1261 tree vdef = gimple_vdef (stmt);
1262 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1264 gimple_set_vdef (store, gimple_vdef (stmt));
1265 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1267 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1268 if (gimple_call_lhs (stmt))
1270 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1271 gsi_replace (gsi, asgn, true);
1273 else
1275 gimple_stmt_iterator gsi2 = *gsi;
1276 gsi_prev (gsi);
1277 gsi_remove (&gsi2, true);
1280 return true;
1284 /* Return the string length, maximum string length or maximum value of
1285 ARG in LENGTH.
1286 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1287 is not NULL and, for TYPE == 0, its value is not equal to the length
1288 we determine or if we are unable to determine the length or value,
1289 return false. VISITED is a bitmap of visited variables.
1290 TYPE is 0 if string length should be returned, 1 for maximum string
1291 length and 2 for maximum value ARG can have. */
1293 static bool
1294 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1296 tree var, val;
1297 gimple def_stmt;
1299 if (TREE_CODE (arg) != SSA_NAME)
1301 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1302 if (TREE_CODE (arg) == ADDR_EXPR
1303 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1304 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1306 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1307 if (TREE_CODE (aop0) == INDIRECT_REF
1308 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1309 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1310 length, visited, type);
1313 if (type == 2)
1315 val = arg;
1316 if (TREE_CODE (val) != INTEGER_CST
1317 || tree_int_cst_sgn (val) < 0)
1318 return false;
1320 else
1321 val = c_strlen (arg, 1);
1322 if (!val)
1323 return false;
1325 if (*length)
1327 if (type > 0)
1329 if (TREE_CODE (*length) != INTEGER_CST
1330 || TREE_CODE (val) != INTEGER_CST)
1331 return false;
1333 if (tree_int_cst_lt (*length, val))
1334 *length = val;
1335 return true;
1337 else if (simple_cst_equal (val, *length) != 1)
1338 return false;
1341 *length = val;
1342 return true;
1345 /* If ARG is registered for SSA update we cannot look at its defining
1346 statement. */
1347 if (name_registered_for_update_p (arg))
1348 return false;
1350 /* If we were already here, break the infinite cycle. */
1351 if (!*visited)
1352 *visited = BITMAP_ALLOC (NULL);
1353 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1354 return true;
1356 var = arg;
1357 def_stmt = SSA_NAME_DEF_STMT (var);
1359 switch (gimple_code (def_stmt))
1361 case GIMPLE_ASSIGN:
1362 /* The RHS of the statement defining VAR must either have a
1363 constant length or come from another SSA_NAME with a constant
1364 length. */
1365 if (gimple_assign_single_p (def_stmt)
1366 || gimple_assign_unary_nop_p (def_stmt))
1368 tree rhs = gimple_assign_rhs1 (def_stmt);
1369 return get_maxval_strlen (rhs, length, visited, type);
1371 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1373 tree op2 = gimple_assign_rhs2 (def_stmt);
1374 tree op3 = gimple_assign_rhs3 (def_stmt);
1375 return get_maxval_strlen (op2, length, visited, type)
1376 && get_maxval_strlen (op3, length, visited, type);
1378 return false;
1380 case GIMPLE_PHI:
1382 /* All the arguments of the PHI node must have the same constant
1383 length. */
1384 unsigned i;
1386 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1388 tree arg = gimple_phi_arg (def_stmt, i)->def;
1390 /* If this PHI has itself as an argument, we cannot
1391 determine the string length of this argument. However,
1392 if we can find a constant string length for the other
1393 PHI args then we can still be sure that this is a
1394 constant string length. So be optimistic and just
1395 continue with the next argument. */
1396 if (arg == gimple_phi_result (def_stmt))
1397 continue;
1399 if (!get_maxval_strlen (arg, length, visited, type))
1400 return false;
1403 return true;
1405 default:
1406 return false;
1410 tree
1411 get_maxval_strlen (tree arg, int type)
1413 bitmap visited = NULL;
1414 tree len = NULL_TREE;
1415 if (!get_maxval_strlen (arg, &len, &visited, type))
1416 len = NULL_TREE;
1417 if (visited)
1418 BITMAP_FREE (visited);
1420 return len;
1424 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1425 If LEN is not NULL, it represents the length of the string to be
1426 copied. Return NULL_TREE if no simplification can be made. */
1428 static bool
1429 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1430 tree dest, tree src)
1432 location_t loc = gimple_location (gsi_stmt (*gsi));
1433 tree fn;
1435 /* If SRC and DEST are the same (and not volatile), return DEST. */
1436 if (operand_equal_p (src, dest, 0))
1438 replace_call_with_value (gsi, dest);
1439 return true;
1442 if (optimize_function_for_size_p (cfun))
1443 return false;
1445 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1446 if (!fn)
1447 return false;
1449 tree len = get_maxval_strlen (src, 0);
1450 if (!len)
1451 return false;
1453 len = fold_convert_loc (loc, size_type_node, len);
1454 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1455 len = force_gimple_operand_gsi (gsi, len, true,
1456 NULL_TREE, true, GSI_SAME_STMT);
1457 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1458 replace_call_with_call_and_fold (gsi, repl);
1459 return true;
1462 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1463 If SLEN is not NULL, it represents the length of the source string.
1464 Return NULL_TREE if no simplification can be made. */
1466 static bool
1467 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1468 tree dest, tree src, tree len)
1470 location_t loc = gimple_location (gsi_stmt (*gsi));
1471 tree fn;
1473 /* If the LEN parameter is zero, return DEST. */
1474 if (integer_zerop (len))
1476 replace_call_with_value (gsi, dest);
1477 return true;
1480 /* We can't compare slen with len as constants below if len is not a
1481 constant. */
1482 if (TREE_CODE (len) != INTEGER_CST)
1483 return false;
1485 /* Now, we must be passed a constant src ptr parameter. */
1486 tree slen = get_maxval_strlen (src, 0);
1487 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1488 return false;
1490 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1492 /* We do not support simplification of this case, though we do
1493 support it when expanding trees into RTL. */
1494 /* FIXME: generate a call to __builtin_memset. */
1495 if (tree_int_cst_lt (slen, len))
1496 return false;
1498 /* OK transform into builtin memcpy. */
1499 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1500 if (!fn)
1501 return false;
1503 len = fold_convert_loc (loc, size_type_node, len);
1504 len = force_gimple_operand_gsi (gsi, len, true,
1505 NULL_TREE, true, GSI_SAME_STMT);
1506 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1507 replace_call_with_call_and_fold (gsi, repl);
1508 return true;
1511 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1512 to the call.
1514 Return NULL_TREE if no simplification was possible, otherwise return the
1515 simplified form of the call as a tree.
1517 The simplified form may be a constant or other expression which
1518 computes the same value, but in a more efficient manner (including
1519 calls to other builtin functions).
1521 The call may contain arguments which need to be evaluated, but
1522 which are not useful to determine the result of the call. In
1523 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1524 COMPOUND_EXPR will be an argument which must be evaluated.
1525 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1526 COMPOUND_EXPR in the chain will contain the tree for the simplified
1527 form of the builtin function call. */
1529 static bool
1530 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1532 gimple stmt = gsi_stmt (*gsi);
1533 location_t loc = gimple_location (stmt);
1535 const char *p = c_getstr (src);
1537 /* If the string length is zero, return the dst parameter. */
1538 if (p && *p == '\0')
1540 replace_call_with_value (gsi, dst);
1541 return true;
1544 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1545 return false;
1547 /* See if we can store by pieces into (dst + strlen(dst)). */
1548 tree newdst;
1549 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1550 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1552 if (!strlen_fn || !memcpy_fn)
1553 return false;
1555 /* If the length of the source string isn't computable don't
1556 split strcat into strlen and memcpy. */
1557 tree len = get_maxval_strlen (src, 0);
1558 if (! len)
1559 return false;
1561 /* Create strlen (dst). */
1562 gimple_seq stmts = NULL, stmts2;
1563 gimple repl = gimple_build_call (strlen_fn, 1, dst);
1564 gimple_set_location (repl, loc);
1565 if (gimple_in_ssa_p (cfun))
1566 newdst = make_ssa_name (size_type_node);
1567 else
1568 newdst = create_tmp_reg (size_type_node);
1569 gimple_call_set_lhs (repl, newdst);
1570 gimple_seq_add_stmt_without_update (&stmts, repl);
1572 /* Create (dst p+ strlen (dst)). */
1573 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1574 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1575 gimple_seq_add_seq_without_update (&stmts, stmts2);
1577 len = fold_convert_loc (loc, size_type_node, len);
1578 len = size_binop_loc (loc, PLUS_EXPR, len,
1579 build_int_cst (size_type_node, 1));
1580 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1581 gimple_seq_add_seq_without_update (&stmts, stmts2);
1583 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1584 gimple_seq_add_stmt_without_update (&stmts, repl);
1585 if (gimple_call_lhs (stmt))
1587 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1588 gimple_seq_add_stmt_without_update (&stmts, repl);
1589 gsi_replace_with_seq_vops (gsi, stmts);
1590 /* gsi now points at the assignment to the lhs, get a
1591 stmt iterator to the memcpy call.
1592 ??? We can't use gsi_for_stmt as that doesn't work when the
1593 CFG isn't built yet. */
1594 gimple_stmt_iterator gsi2 = *gsi;
1595 gsi_prev (&gsi2);
1596 fold_stmt (&gsi2);
1598 else
1600 gsi_replace_with_seq_vops (gsi, stmts);
1601 fold_stmt (gsi);
1603 return true;
1606 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1607 are the arguments to the call. */
1609 static bool
1610 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1612 gimple stmt = gsi_stmt (*gsi);
1613 tree dest = gimple_call_arg (stmt, 0);
1614 tree src = gimple_call_arg (stmt, 1);
1615 tree size = gimple_call_arg (stmt, 2);
1616 tree fn;
1617 const char *p;
1620 p = c_getstr (src);
1621 /* If the SRC parameter is "", return DEST. */
1622 if (p && *p == '\0')
1624 replace_call_with_value (gsi, dest);
1625 return true;
1628 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1629 return false;
1631 /* If __builtin_strcat_chk is used, assume strcat is available. */
1632 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1633 if (!fn)
1634 return false;
1636 gimple repl = gimple_build_call (fn, 2, dest, src);
1637 replace_call_with_call_and_fold (gsi, repl);
1638 return true;
1641 /* Simplify a call to the strncat builtin. */
1643 static bool
1644 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1646 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1647 tree dst = gimple_call_arg (stmt, 0);
1648 tree src = gimple_call_arg (stmt, 1);
1649 tree len = gimple_call_arg (stmt, 2);
1651 const char *p = c_getstr (src);
1653 /* If the requested length is zero, or the src parameter string
1654 length is zero, return the dst parameter. */
1655 if (integer_zerop (len) || (p && *p == '\0'))
1657 replace_call_with_value (gsi, dst);
1658 return true;
1661 /* If the requested len is greater than or equal to the string
1662 length, call strcat. */
1663 if (TREE_CODE (len) == INTEGER_CST && p
1664 && compare_tree_int (len, strlen (p)) >= 0)
1666 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1668 /* If the replacement _DECL isn't initialized, don't do the
1669 transformation. */
1670 if (!fn)
1671 return false;
1673 gcall *repl = gimple_build_call (fn, 2, dst, src);
1674 replace_call_with_call_and_fold (gsi, repl);
1675 return true;
1678 return false;
1681 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1682 LEN, and SIZE. */
1684 static bool
1685 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1687 gimple stmt = gsi_stmt (*gsi);
1688 tree dest = gimple_call_arg (stmt, 0);
1689 tree src = gimple_call_arg (stmt, 1);
1690 tree len = gimple_call_arg (stmt, 2);
1691 tree size = gimple_call_arg (stmt, 3);
1692 tree fn;
1693 const char *p;
1695 p = c_getstr (src);
1696 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1697 if ((p && *p == '\0')
1698 || integer_zerop (len))
1700 replace_call_with_value (gsi, dest);
1701 return true;
1704 if (! tree_fits_uhwi_p (size))
1705 return false;
1707 if (! integer_all_onesp (size))
1709 tree src_len = c_strlen (src, 1);
1710 if (src_len
1711 && tree_fits_uhwi_p (src_len)
1712 && tree_fits_uhwi_p (len)
1713 && ! tree_int_cst_lt (len, src_len))
1715 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1716 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1717 if (!fn)
1718 return false;
1720 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1721 replace_call_with_call_and_fold (gsi, repl);
1722 return true;
1724 return false;
1727 /* If __builtin_strncat_chk is used, assume strncat is available. */
1728 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1729 if (!fn)
1730 return false;
1732 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1733 replace_call_with_call_and_fold (gsi, repl);
1734 return true;
1737 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1738 to the call. IGNORE is true if the value returned
1739 by the builtin will be ignored. UNLOCKED is true is true if this
1740 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1741 the known length of the string. Return NULL_TREE if no simplification
1742 was possible. */
1744 static bool
1745 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1746 tree arg0, tree arg1,
1747 bool unlocked)
1749 gimple stmt = gsi_stmt (*gsi);
1751 /* If we're using an unlocked function, assume the other unlocked
1752 functions exist explicitly. */
1753 tree const fn_fputc = (unlocked
1754 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1755 : builtin_decl_implicit (BUILT_IN_FPUTC));
1756 tree const fn_fwrite = (unlocked
1757 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1758 : builtin_decl_implicit (BUILT_IN_FWRITE));
1760 /* If the return value is used, don't do the transformation. */
1761 if (gimple_call_lhs (stmt))
1762 return false;
1764 /* Get the length of the string passed to fputs. If the length
1765 can't be determined, punt. */
1766 tree len = get_maxval_strlen (arg0, 0);
1767 if (!len
1768 || TREE_CODE (len) != INTEGER_CST)
1769 return false;
1771 switch (compare_tree_int (len, 1))
1773 case -1: /* length is 0, delete the call entirely . */
1774 replace_call_with_value (gsi, integer_zero_node);
1775 return true;
1777 case 0: /* length is 1, call fputc. */
1779 const char *p = c_getstr (arg0);
1780 if (p != NULL)
1782 if (!fn_fputc)
1783 return false;
1785 gimple repl = gimple_build_call (fn_fputc, 2,
1786 build_int_cst
1787 (integer_type_node, p[0]), arg1);
1788 replace_call_with_call_and_fold (gsi, repl);
1789 return true;
1792 /* FALLTHROUGH */
1793 case 1: /* length is greater than 1, call fwrite. */
1795 /* If optimizing for size keep fputs. */
1796 if (optimize_function_for_size_p (cfun))
1797 return false;
1798 /* New argument list transforming fputs(string, stream) to
1799 fwrite(string, 1, len, stream). */
1800 if (!fn_fwrite)
1801 return false;
1803 gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
1804 size_one_node, len, arg1);
1805 replace_call_with_call_and_fold (gsi, repl);
1806 return true;
1808 default:
1809 gcc_unreachable ();
1811 return false;
1814 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1815 DEST, SRC, LEN, and SIZE are the arguments to the call.
1816 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1817 code of the builtin. If MAXLEN is not NULL, it is maximum length
1818 passed as third argument. */
1820 static bool
1821 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1822 tree dest, tree src, tree len, tree size,
1823 enum built_in_function fcode)
1825 gimple stmt = gsi_stmt (*gsi);
1826 location_t loc = gimple_location (stmt);
1827 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1828 tree fn;
1830 /* If SRC and DEST are the same (and not volatile), return DEST
1831 (resp. DEST+LEN for __mempcpy_chk). */
1832 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1834 if (fcode != BUILT_IN_MEMPCPY_CHK)
1836 replace_call_with_value (gsi, dest);
1837 return true;
1839 else
1841 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
1842 temp = force_gimple_operand_gsi (gsi, temp,
1843 false, NULL_TREE, true,
1844 GSI_SAME_STMT);
1845 replace_call_with_value (gsi, temp);
1846 return true;
1850 if (! tree_fits_uhwi_p (size))
1851 return false;
1853 tree maxlen = get_maxval_strlen (len, 2);
1854 if (! integer_all_onesp (size))
1856 if (! tree_fits_uhwi_p (len))
1858 /* If LEN is not constant, try MAXLEN too.
1859 For MAXLEN only allow optimizing into non-_ocs function
1860 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1861 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1863 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1865 /* (void) __mempcpy_chk () can be optimized into
1866 (void) __memcpy_chk (). */
1867 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1868 if (!fn)
1869 return false;
1871 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1872 replace_call_with_call_and_fold (gsi, repl);
1873 return true;
1875 return false;
1878 else
1879 maxlen = len;
1881 if (tree_int_cst_lt (size, maxlen))
1882 return false;
1885 fn = NULL_TREE;
1886 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1887 mem{cpy,pcpy,move,set} is available. */
1888 switch (fcode)
1890 case BUILT_IN_MEMCPY_CHK:
1891 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1892 break;
1893 case BUILT_IN_MEMPCPY_CHK:
1894 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1895 break;
1896 case BUILT_IN_MEMMOVE_CHK:
1897 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1898 break;
1899 case BUILT_IN_MEMSET_CHK:
1900 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1901 break;
1902 default:
1903 break;
1906 if (!fn)
1907 return false;
1909 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1910 replace_call_with_call_and_fold (gsi, repl);
1911 return true;
1914 /* Fold a call to the __st[rp]cpy_chk builtin.
1915 DEST, SRC, and SIZE are the arguments to the call.
1916 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1917 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1918 strings passed as second argument. */
1920 static bool
1921 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1922 tree dest,
1923 tree src, tree size,
1924 enum built_in_function fcode)
1926 gimple stmt = gsi_stmt (*gsi);
1927 location_t loc = gimple_location (stmt);
1928 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1929 tree len, fn;
1931 /* If SRC and DEST are the same (and not volatile), return DEST. */
1932 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1934 replace_call_with_value (gsi, dest);
1935 return true;
1938 if (! tree_fits_uhwi_p (size))
1939 return false;
1941 tree maxlen = get_maxval_strlen (src, 1);
1942 if (! integer_all_onesp (size))
1944 len = c_strlen (src, 1);
1945 if (! len || ! tree_fits_uhwi_p (len))
1947 /* If LEN is not constant, try MAXLEN too.
1948 For MAXLEN only allow optimizing into non-_ocs function
1949 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1950 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1952 if (fcode == BUILT_IN_STPCPY_CHK)
1954 if (! ignore)
1955 return false;
1957 /* If return value of __stpcpy_chk is ignored,
1958 optimize into __strcpy_chk. */
1959 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1960 if (!fn)
1961 return false;
1963 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1964 replace_call_with_call_and_fold (gsi, repl);
1965 return true;
1968 if (! len || TREE_SIDE_EFFECTS (len))
1969 return false;
1971 /* If c_strlen returned something, but not a constant,
1972 transform __strcpy_chk into __memcpy_chk. */
1973 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1974 if (!fn)
1975 return false;
1977 len = fold_convert_loc (loc, size_type_node, len);
1978 len = size_binop_loc (loc, PLUS_EXPR, len,
1979 build_int_cst (size_type_node, 1));
1980 len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
1981 true, GSI_SAME_STMT);
1982 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1983 replace_call_with_call_and_fold (gsi, repl);
1984 return true;
1987 else
1988 maxlen = len;
1990 if (! tree_int_cst_lt (maxlen, size))
1991 return false;
1994 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1995 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1996 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1997 if (!fn)
1998 return false;
2000 gimple repl = gimple_build_call (fn, 2, dest, src);
2001 replace_call_with_call_and_fold (gsi, repl);
2002 return true;
2005 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2006 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2007 length passed as third argument. IGNORE is true if return value can be
2008 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2010 static bool
2011 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2012 tree dest, tree src,
2013 tree len, tree size,
2014 enum built_in_function fcode)
2016 gimple stmt = gsi_stmt (*gsi);
2017 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2018 tree fn;
2020 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2022 /* If return value of __stpncpy_chk is ignored,
2023 optimize into __strncpy_chk. */
2024 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2025 if (fn)
2027 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
2028 replace_call_with_call_and_fold (gsi, repl);
2029 return true;
2033 if (! tree_fits_uhwi_p (size))
2034 return false;
2036 tree maxlen = get_maxval_strlen (len, 2);
2037 if (! integer_all_onesp (size))
2039 if (! tree_fits_uhwi_p (len))
2041 /* If LEN is not constant, try MAXLEN too.
2042 For MAXLEN only allow optimizing into non-_ocs function
2043 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2044 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2045 return false;
2047 else
2048 maxlen = len;
2050 if (tree_int_cst_lt (size, maxlen))
2051 return false;
2054 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2055 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2056 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2057 if (!fn)
2058 return false;
2060 gimple repl = gimple_build_call (fn, 3, dest, src, len);
2061 replace_call_with_call_and_fold (gsi, repl);
2062 return true;
2065 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2066 Return NULL_TREE if no simplification can be made. */
2068 static bool
2069 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2071 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2072 location_t loc = gimple_location (stmt);
2073 tree dest = gimple_call_arg (stmt, 0);
2074 tree src = gimple_call_arg (stmt, 1);
2075 tree fn, len, lenp1;
2077 /* If the result is unused, replace stpcpy with strcpy. */
2078 if (gimple_call_lhs (stmt) == NULL_TREE)
2080 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2081 if (!fn)
2082 return false;
2083 gimple_call_set_fndecl (stmt, fn);
2084 fold_stmt (gsi);
2085 return true;
2088 len = c_strlen (src, 1);
2089 if (!len
2090 || TREE_CODE (len) != INTEGER_CST)
2091 return false;
2093 if (optimize_function_for_size_p (cfun)
2094 /* If length is zero it's small enough. */
2095 && !integer_zerop (len))
2096 return false;
2098 /* If the source has a known length replace stpcpy with memcpy. */
2099 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2100 if (!fn)
2101 return false;
2103 gimple_seq stmts = NULL;
2104 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2105 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2106 tem, build_int_cst (size_type_node, 1));
2107 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2108 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2109 gimple_set_vuse (repl, gimple_vuse (stmt));
2110 gimple_set_vdef (repl, gimple_vdef (stmt));
2111 if (gimple_vdef (repl)
2112 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2113 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2114 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2115 /* Replace the result with dest + len. */
2116 stmts = NULL;
2117 tem = gimple_convert (&stmts, loc, sizetype, len);
2118 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2119 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2120 POINTER_PLUS_EXPR, dest, tem);
2121 gsi_replace (gsi, ret, true);
2122 /* Finally fold the memcpy call. */
2123 gimple_stmt_iterator gsi2 = *gsi;
2124 gsi_prev (&gsi2);
2125 fold_stmt (&gsi2);
2126 return true;
2129 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2130 NULL_TREE if a normal call should be emitted rather than expanding
2131 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2132 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2133 passed as second argument. */
/* NOTE(review): the comment above is stale (inherited from the tree-level
   builtin folder).  This GIMPLE variant takes the statement iterator GSI
   and FCODE, and returns a bool: true when the __{,v}snprintf_chk call at
   *GSI was rewritten in place into plain {,v}snprintf, false when the
   original call should be kept.  */
2135 static bool
2136 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2137 enum built_in_function fcode)
2139 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2140 tree dest, size, len, fn, fmt, flag;
2141 const char *fmt_str;
2143 /* Verify the required arguments in the original call. */
2144 if (gimple_call_num_args (stmt) < 5)
2145 return false;
/* Argument layout of __snprintf_chk: (dest, len, flag, objsize, fmt, ...).  */
2147 dest = gimple_call_arg (stmt, 0);
2148 len = gimple_call_arg (stmt, 1);
2149 flag = gimple_call_arg (stmt, 2);
2150 size = gimple_call_arg (stmt, 3);
2151 fmt = gimple_call_arg (stmt, 4);
/* Punt unless the checked object size is a compile-time constant.  */
2153 if (! tree_fits_uhwi_p (size))
2154 return false;
/* An all-ones SIZE means "object size unknown"; otherwise we must prove
   SIZE >= the (maximum) length argument before dropping the check.  */
2156 if (! integer_all_onesp (size))
2158 tree maxlen = get_maxval_strlen (len, 2);
2159 if (! tree_fits_uhwi_p (len))
2161 /* If LEN is not constant, try MAXLEN too.
2162 For MAXLEN only allow optimizing into non-_ocs function
2163 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2164 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2165 return false;
2167 else
2168 maxlen = len;
2170 if (tree_int_cst_lt (size, maxlen))
2171 return false;
2174 if (!init_target_chars ())
2175 return false;
2177 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2178 or if format doesn't contain % chars or is "%s". */
2179 if (! integer_zerop (flag))
2181 fmt_str = c_getstr (fmt);
2182 if (fmt_str == NULL)
2183 return false;
2184 if (strchr (fmt_str, target_percent) != NULL
2185 && strcmp (fmt_str, target_percent_s))
2186 return false;
2189 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2190 available. */
2191 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2192 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2193 if (!fn)
2194 return false;
2196 /* Replace the called function and the first 5 argument by 3 retaining
2197 trailing varargs. */
2198 gimple_call_set_fndecl (stmt, fn);
2199 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2200 gimple_call_set_arg (stmt, 0, dest);
2201 gimple_call_set_arg (stmt, 1, len);
2202 gimple_call_set_arg (stmt, 2, fmt);
/* Shift the trailing varargs left over the dropped FLAG and SIZE slots,
   then shrink the statement's operand count accordingly.  */
2203 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2204 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2205 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2206 fold_stmt (gsi);
2207 return true;
2210 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2211 Return NULL_TREE if a normal call should be emitted rather than
2212 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2213 or BUILT_IN_VSPRINTF_CHK. */
/* NOTE(review): stale comment above — this GIMPLE variant returns a bool:
   true when the call at *GSI was rewritten in place into {,v}sprintf,
   false to keep the original checked call.  */
2215 static bool
2216 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2217 enum built_in_function fcode)
2219 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2220 tree dest, size, len, fn, fmt, flag;
2221 const char *fmt_str;
2222 unsigned nargs = gimple_call_num_args (stmt);
2224 /* Verify the required arguments in the original call. */
2225 if (nargs < 4)
2226 return false;
/* Argument layout of __sprintf_chk: (dest, flag, objsize, fmt, ...).  */
2227 dest = gimple_call_arg (stmt, 0);
2228 flag = gimple_call_arg (stmt, 1);
2229 size = gimple_call_arg (stmt, 2);
2230 fmt = gimple_call_arg (stmt, 3);
/* Punt unless the checked object size is a compile-time constant.  */
2232 if (! tree_fits_uhwi_p (size))
2233 return false;
2235 len = NULL_TREE;
2237 if (!init_target_chars ())
2238 return false;
2240 /* Check whether the format is a literal string constant. */
2241 fmt_str = c_getstr (fmt);
2242 if (fmt_str != NULL)
2244 /* If the format doesn't contain % args or %%, we know the size. */
2245 if (strchr (fmt_str, target_percent) == 0)
2247 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2248 len = build_int_cstu (size_type_node, strlen (fmt_str))
2250 /* If the format is "%s" and first ... argument is a string literal,
2251 we know the size too. */
2252 else if (fcode == BUILT_IN_SPRINTF_CHK
2253 && strcmp (fmt_str, target_percent_s) == 0)
2255 tree arg;
2257 if (nargs == 5)
2259 arg = gimple_call_arg (stmt, 4);
2260 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2262 len = c_strlen (arg, 1);
2263 if (! len || ! tree_fits_uhwi_p (len))
2264 len = NULL_TREE;
/* Unless the object size is unknown (all ones), the computed output
   length must be provably smaller than the object size.  */
2270 if (! integer_all_onesp (size))
2272 if (! len || ! tree_int_cst_lt (len, size))
2273 return false;
2276 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2277 or if format doesn't contain % chars or is "%s". */
2278 if (! integer_zerop (flag))
2280 if (fmt_str == NULL)
2281 return false;
2282 if (strchr (fmt_str, target_percent) != NULL
2283 && strcmp (fmt_str, target_percent_s))
2284 return false;
2287 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2288 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2289 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2290 if (!fn)
2291 return false;
2293 /* Replace the called function and the first 4 argument by 2 retaining
2294 trailing varargs. */
2295 gimple_call_set_fndecl (stmt, fn);
2296 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2297 gimple_call_set_arg (stmt, 0, dest);
2298 gimple_call_set_arg (stmt, 1, fmt);
/* Shift trailing varargs left over the dropped FLAG and SIZE slots.  */
2299 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2300 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2301 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2302 fold_stmt (gsi);
2303 return true;
2306 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2307 ORIG may be null if this is a 2-argument call. We don't attempt to
2308 simplify calls with more than 3 arguments.
2310 Return NULL_TREE if no simplification was possible, otherwise return the
2311 simplified form of the call as a tree. If IGNORED is true, it means that
2312 the caller does not use the returned value of the function. */
/* NOTE(review): stale comment above — this GIMPLE variant returns a bool
   (true iff the call at *GSI was replaced) and extracts DEST/FMT/ORIG from
   the statement itself; there is no IGNORED parameter.  */
2314 static bool
2315 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2317 gimple stmt = gsi_stmt (*gsi);
2318 tree dest = gimple_call_arg (stmt, 0);
2319 tree fmt = gimple_call_arg (stmt, 1);
2320 tree orig = NULL_TREE;
2321 const char *fmt_str = NULL;
2323 /* Verify the required arguments in the original call. We deal with two
2324 types of sprintf() calls: 'sprintf (str, fmt)' and
2325 'sprintf (dest, "%s", orig)'. */
2326 if (gimple_call_num_args (stmt) > 3)
2327 return false;
2329 if (gimple_call_num_args (stmt) == 3)
2330 orig = gimple_call_arg (stmt, 2);
2332 /* Check whether the format is a literal string constant. */
2333 fmt_str = c_getstr (fmt);
2334 if (fmt_str == NULL)
2335 return false;
2337 if (!init_target_chars ())
2338 return false;
2340 /* If the format doesn't contain % args or %%, use strcpy. */
2341 if (strchr (fmt_str, target_percent) == NULL)
2343 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2345 if (!fn)
2346 return false;
2348 /* Don't optimize sprintf (buf, "abc", ptr++). */
2349 if (orig)
2350 return false;
2352 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2353 'format' is known to contain no % formats. */
2354 gimple_seq stmts = NULL;
2355 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2356 gimple_seq_add_stmt_without_update (&stmts, repl);
2357 if (gimple_call_lhs (stmt))
/* sprintf returns the number of characters written, which here is the
   compile-time length of the literal format string.  */
2359 repl = gimple_build_assign (gimple_call_lhs (stmt),
2360 build_int_cst (integer_type_node,
2361 strlen (fmt_str)));
2362 gimple_seq_add_stmt_without_update (&stmts, repl);
2363 gsi_replace_with_seq_vops (gsi, stmts);
2364 /* gsi now points at the assignment to the lhs, get a
2365 stmt iterator to the memcpy call.
2366 ??? We can't use gsi_for_stmt as that doesn't work when the
2367 CFG isn't built yet. */
2368 gimple_stmt_iterator gsi2 = *gsi;
2369 gsi_prev (&gsi2);
2370 fold_stmt (&gsi2);
2372 else
2374 gsi_replace_with_seq_vops (gsi, stmts);
2375 fold_stmt (gsi);
2377 return true;
2380 /* If the format is "%s", use strcpy if the result isn't used. */
2381 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2383 tree fn;
2384 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2386 if (!fn)
2387 return false;
2389 /* Don't crash on sprintf (str1, "%s"). */
2390 if (!orig)
2391 return false;
/* If the result is used we must also know strlen (orig) to materialize
   the return value; otherwise punt.  */
2393 tree orig_len = NULL_TREE;
2394 if (gimple_call_lhs (stmt))
2396 orig_len = get_maxval_strlen (orig, 0);
2397 if (!orig_len)
2398 return false;
2401 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2402 gimple_seq stmts = NULL;
2403 gimple repl = gimple_build_call (fn, 2, dest, orig);
2404 gimple_seq_add_stmt_without_update (&stmts, repl);
2405 if (gimple_call_lhs (stmt))
2407 if (!useless_type_conversion_p (integer_type_node,
2408 TREE_TYPE (orig_len)))
2409 orig_len = fold_convert (integer_type_node, orig_len);
2410 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2411 gimple_seq_add_stmt_without_update (&stmts, repl);
2412 gsi_replace_with_seq_vops (gsi, stmts);
2413 /* gsi now points at the assignment to the lhs, get a
2414 stmt iterator to the memcpy call.
2415 ??? We can't use gsi_for_stmt as that doesn't work when the
2416 CFG isn't built yet. */
2417 gimple_stmt_iterator gsi2 = *gsi;
2418 gsi_prev (&gsi2);
2419 fold_stmt (&gsi2);
2421 else
2423 gsi_replace_with_seq_vops (gsi, stmts);
2424 fold_stmt (gsi);
2426 return true;
2428 return false;
2431 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2432 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2433 attempt to simplify calls with more than 4 arguments.
2435 Return NULL_TREE if no simplification was possible, otherwise return the
2436 simplified form of the call as a tree. If IGNORED is true, it means that
2437 the caller does not use the returned value of the function. */
/* NOTE(review): stale comment above — this GIMPLE variant returns a bool
   (true iff the call at *GSI was replaced) and reads its operands from the
   statement; there is no IGNORED parameter.  */
2439 static bool
2440 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2442 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2443 tree dest = gimple_call_arg (stmt, 0);
2444 tree destsize = gimple_call_arg (stmt, 1);
2445 tree fmt = gimple_call_arg (stmt, 2);
2446 tree orig = NULL_TREE;
2447 const char *fmt_str = NULL;
2449 if (gimple_call_num_args (stmt) > 4)
2450 return false;
2452 if (gimple_call_num_args (stmt) == 4)
2453 orig = gimple_call_arg (stmt, 3);
/* The destination size must be a compile-time constant so that we can
   prove no truncation would occur.  */
2455 if (!tree_fits_uhwi_p (destsize))
2456 return false;
2457 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2459 /* Check whether the format is a literal string constant. */
2460 fmt_str = c_getstr (fmt);
2461 if (fmt_str == NULL)
2462 return false;
2464 if (!init_target_chars ())
2465 return false;
2467 /* If the format doesn't contain % args or %%, use strcpy. */
2468 if (strchr (fmt_str, target_percent) == NULL)
2470 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2471 if (!fn)
2472 return false;
2474 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2475 if (orig)
2476 return false;
2478 /* We could expand this as
2479 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2480 or to
2481 memcpy (str, fmt_with_nul_at_cstm1, cst);
2482 but in the former case that might increase code size
2483 and in the latter case grow .rodata section too much.
2484 So punt for now. */
2485 size_t len = strlen (fmt_str);
2486 if (len >= destlen)
2487 return false;
2489 gimple_seq stmts = NULL;
2490 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2491 gimple_seq_add_stmt_without_update (&stmts, repl);
2492 if (gimple_call_lhs (stmt))
/* snprintf returns the would-be length, which is the length of the
   literal format string here (no truncation, checked above).  */
2494 repl = gimple_build_assign (gimple_call_lhs (stmt),
2495 build_int_cst (integer_type_node, len));
2496 gimple_seq_add_stmt_without_update (&stmts, repl);
2497 gsi_replace_with_seq_vops (gsi, stmts);
2498 /* gsi now points at the assignment to the lhs, get a
2499 stmt iterator to the memcpy call.
2500 ??? We can't use gsi_for_stmt as that doesn't work when the
2501 CFG isn't built yet. */
2502 gimple_stmt_iterator gsi2 = *gsi;
2503 gsi_prev (&gsi2);
2504 fold_stmt (&gsi2);
2506 else
2508 gsi_replace_with_seq_vops (gsi, stmts);
2509 fold_stmt (gsi);
2511 return true;
2514 /* If the format is "%s", use strcpy if the result isn't used. */
2515 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2517 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2518 if (!fn)
2519 return false;
2521 /* Don't crash on snprintf (str1, cst, "%s"). */
2522 if (!orig)
2523 return false;
/* Need a known upper bound on strlen (orig) to rule out truncation.  */
2525 tree orig_len = get_maxval_strlen (orig, 0);
2526 if (!orig_len)
2527 return false;
2529 /* We could expand this as
2530 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2531 or to
2532 memcpy (str1, str2_with_nul_at_cstm1, cst);
2533 but in the former case that might increase code size
2534 and in the latter case grow .rodata section too much.
2535 So punt for now. */
2536 if (compare_tree_int (orig_len, destlen) >= 0)
2537 return false;
2539 /* Convert snprintf (str1, cst, "%s", str2) into
2540 strcpy (str1, str2) if strlen (str2) < cst. */
2541 gimple_seq stmts = NULL;
2542 gimple repl = gimple_build_call (fn, 2, dest, orig);
2543 gimple_seq_add_stmt_without_update (&stmts, repl);
2544 if (gimple_call_lhs (stmt))
2546 if (!useless_type_conversion_p (integer_type_node,
2547 TREE_TYPE (orig_len)))
2548 orig_len = fold_convert (integer_type_node, orig_len);
2549 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2550 gimple_seq_add_stmt_without_update (&stmts, repl);
2551 gsi_replace_with_seq_vops (gsi, stmts);
2552 /* gsi now points at the assignment to the lhs, get a
2553 stmt iterator to the memcpy call.
2554 ??? We can't use gsi_for_stmt as that doesn't work when the
2555 CFG isn't built yet. */
2556 gimple_stmt_iterator gsi2 = *gsi;
2557 gsi_prev (&gsi2);
2558 fold_stmt (&gsi2);
2560 else
2562 gsi_replace_with_seq_vops (gsi, stmts);
2563 fold_stmt (gsi);
2565 return true;
2567 return false;
2570 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2571 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2572 more than 3 arguments, and ARG may be null in the 2-argument case.
2574 Return NULL_TREE if no simplification was possible, otherwise return the
2575 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2576 code of the function to be simplified. */
/* NOTE(review): stale return-value wording above — this GIMPLE variant
   returns a bool: true iff the call at *GSI was replaced (by fputs/fputc
   or removed entirely).  */
2578 static bool
2579 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2580 tree fp, tree fmt, tree arg,
2581 enum built_in_function fcode)
2583 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2584 tree fn_fputc, fn_fputs;
2585 const char *fmt_str = NULL;
2587 /* If the return value is used, don't do the transformation. */
2588 if (gimple_call_lhs (stmt) != NULL_TREE)
2589 return false;
2591 /* Check whether the format is a literal string constant. */
2592 fmt_str = c_getstr (fmt);
2593 if (fmt_str == NULL)
2594 return false;
2596 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2598 /* If we're using an unlocked function, assume the other
2599 unlocked functions exist explicitly. */
2600 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2601 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2603 else
2605 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2606 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2609 if (!init_target_chars ())
2610 return false;
2612 /* If the format doesn't contain % args or %%, use strcpy. */
2613 if (strchr (fmt_str, target_percent) == NULL)
/* A trailing ARG with a %-free format means excess arguments for the
   non-va_list variants; leave such calls alone.  */
2615 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2616 && arg)
2617 return false;
2619 /* If the format specifier was "", fprintf does nothing. */
2620 if (fmt_str[0] == '\0')
2622 replace_call_with_value (gsi, NULL_TREE);
2623 return true;
2626 /* When "string" doesn't contain %, replace all cases of
2627 fprintf (fp, string) with fputs (string, fp). The fputs
2628 builtin will take care of special cases like length == 1. */
2629 if (fn_fputs)
2631 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2632 replace_call_with_call_and_fold (gsi, repl);
2633 return true;
2637 /* The other optimizations can be done only on the non-va_list variants. */
2638 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2639 return false;
2641 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2642 else if (strcmp (fmt_str, target_percent_s) == 0)
2644 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2645 return false;
2646 if (fn_fputs)
2648 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2649 replace_call_with_call_and_fold (gsi, repl);
2650 return true;
2654 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2655 else if (strcmp (fmt_str, target_percent_c) == 0)
2657 if (!arg
2658 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2659 return false;
2660 if (fn_fputc)
2662 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2663 replace_call_with_call_and_fold (gsi, repl);
2664 return true;
2668 return false;
2671 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2672 FMT and ARG are the arguments to the call; we don't fold cases with
2673 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2675 Return NULL_TREE if no simplification was possible, otherwise return the
2676 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2677 code of the function to be simplified. */
/* NOTE(review): stale return-value wording above — this GIMPLE variant
   returns a bool: true iff the call at *GSI was replaced (by putchar/puts
   or removed entirely).  */
2679 static bool
2680 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2681 tree arg, enum built_in_function fcode)
2683 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2684 tree fn_putchar, fn_puts, newarg;
2685 const char *fmt_str = NULL;
2687 /* If the return value is used, don't do the transformation. */
2688 if (gimple_call_lhs (stmt) != NULL_TREE)
2689 return false;
2691 /* Check whether the format is a literal string constant. */
2692 fmt_str = c_getstr (fmt);
2693 if (fmt_str == NULL)
2694 return false;
2696 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2698 /* If we're using an unlocked function, assume the other
2699 unlocked functions exist explicitly. */
2700 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2701 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2703 else
2705 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2706 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2709 if (!init_target_chars ())
2710 return false;
/* Handle both printf ("%s", str-literal) and printf ("plain string"):
   in either case STR below is the string actually printed.  */
2712 if (strcmp (fmt_str, target_percent_s) == 0
2713 || strchr (fmt_str, target_percent) == NULL)
2715 const char *str;
2717 if (strcmp (fmt_str, target_percent_s) == 0)
2719 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2720 return false;
2722 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2723 return false;
2725 str = c_getstr (arg);
2726 if (str == NULL)
2727 return false;
2729 else
2731 /* The format specifier doesn't contain any '%' characters. */
2732 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2733 && arg)
2734 return false;
2735 str = fmt_str;
2738 /* If the string was "", printf does nothing. */
2739 if (str[0] == '\0')
2741 replace_call_with_value (gsi, NULL_TREE);
2742 return true;
2745 /* If the string has length of 1, call putchar. */
2746 if (str[1] == '\0')
2748 /* Given printf("c"), (where c is any one character,)
2749 convert "c"[0] to an int and pass that to the replacement
2750 function. */
2751 newarg = build_int_cst (integer_type_node, str[0]);
2752 if (fn_putchar)
2754 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2755 replace_call_with_call_and_fold (gsi, repl);
2756 return true;
2759 else
2761 /* If the string was "string\n", call puts("string"). */
2762 size_t len = strlen (str);
2763 if ((unsigned char)str[len - 1] == target_newline
2764 && (size_t) (int) len == len
2765 && (int) len > 0)
2767 char *newstr;
2768 tree offset_node, string_cst;
2770 /* Create a NUL-terminated string that's one char shorter
2771 than the original, stripping off the trailing '\n'. */
2772 newarg = build_string_literal (len, str);
2773 string_cst = string_constant (newarg, &offset_node);
2774 gcc_checking_assert (string_cst
2775 && (TREE_STRING_LENGTH (string_cst)
2776 == (int) len)
2777 && integer_zerop (offset_node)
2778 && (unsigned char)
2779 TREE_STRING_POINTER (string_cst)[len - 1]
2780 == target_newline);
2781 /* build_string_literal creates a new STRING_CST,
2782 modify it in place to avoid double copying. */
2783 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2784 newstr[len - 1] = '\0';
2785 if (fn_puts)
2787 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2788 replace_call_with_call_and_fold (gsi, repl);
2789 return true;
2792 else
2793 /* We'd like to arrange to call fputs(string,stdout) here,
2794 but we need stdout and don't have a way to get it yet. */
2795 return false;
2799 /* The other optimizations can be done only on the non-va_list variants. */
2800 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2801 return false;
2803 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2804 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2806 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2807 return false;
2808 if (fn_puts)
2810 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2811 replace_call_with_call_and_fold (gsi, repl);
2812 return true;
2816 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2817 else if (strcmp (fmt_str, target_percent_c) == 0)
2819 if (!arg || ! useless_type_conversion_p (integer_type_node,
2820 TREE_TYPE (arg)))
2821 return false;
2822 if (fn_putchar)
2824 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2825 replace_call_with_call_and_fold (gsi, repl);
2826 return true;
2830 return false;
2835 /* Fold a call to __builtin_strlen with known length LEN. */
2837 static bool
2838 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2840 gimple stmt = gsi_stmt (*gsi);
2841 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2842 if (!len)
2843 return false;
2844 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2845 replace_call_with_value (gsi, len);
2846 return true;
2850 /* Fold the non-target builtin at *GSI and return whether any simplification
2851 was made. */
/* Central dispatcher: routes each BUILT_IN_* code to its dedicated
   gimple_fold_builtin_* helper, then falls back to the generic tree-level
   folder (fold_call_stmt).  Note the relative argument positions: the
   _chk/bcopy variants pass their operands in a different order or with
   extra checking arguments, which is why each case extracts specific
   argument indices.  */
2853 static bool
2854 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2856 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2857 tree callee = gimple_call_fndecl (stmt);
2859 /* Give up for always_inline inline builtins until they are
2860 inlined. */
2861 if (avoid_folding_inline_builtin (callee))
2862 return false;
2864 unsigned n = gimple_call_num_args (stmt);
2865 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2866 switch (fcode)
2868 case BUILT_IN_BZERO:
2869 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2870 gimple_call_arg (stmt, 1));
2871 case BUILT_IN_MEMSET:
2872 return gimple_fold_builtin_memset (gsi,
2873 gimple_call_arg (stmt, 1),
2874 gimple_call_arg (stmt, 2));
/* bcopy (src, dest, n) has swapped operands relative to memmove.  */
2875 case BUILT_IN_BCOPY:
2876 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2877 gimple_call_arg (stmt, 0), 3);
2878 case BUILT_IN_MEMCPY:
2879 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2880 gimple_call_arg (stmt, 1), 0);
2881 case BUILT_IN_MEMPCPY:
2882 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2883 gimple_call_arg (stmt, 1), 1);
2884 case BUILT_IN_MEMMOVE:
2885 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2886 gimple_call_arg (stmt, 1), 3);
2887 case BUILT_IN_SPRINTF_CHK:
2888 case BUILT_IN_VSPRINTF_CHK:
2889 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2890 case BUILT_IN_STRCAT_CHK:
2891 return gimple_fold_builtin_strcat_chk (gsi);
2892 case BUILT_IN_STRNCAT_CHK:
2893 return gimple_fold_builtin_strncat_chk (gsi);
2894 case BUILT_IN_STRLEN:
2895 return gimple_fold_builtin_strlen (gsi);
2896 case BUILT_IN_STRCPY:
2897 return gimple_fold_builtin_strcpy (gsi,
2898 gimple_call_arg (stmt, 0),
2899 gimple_call_arg (stmt, 1));
2900 case BUILT_IN_STRNCPY:
2901 return gimple_fold_builtin_strncpy (gsi,
2902 gimple_call_arg (stmt, 0),
2903 gimple_call_arg (stmt, 1),
2904 gimple_call_arg (stmt, 2));
2905 case BUILT_IN_STRCAT:
2906 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2907 gimple_call_arg (stmt, 1));
2908 case BUILT_IN_STRNCAT:
2909 return gimple_fold_builtin_strncat (gsi);
2910 case BUILT_IN_FPUTS:
2911 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2912 gimple_call_arg (stmt, 1), false);
2913 case BUILT_IN_FPUTS_UNLOCKED:
2914 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2915 gimple_call_arg (stmt, 1), true);
2916 case BUILT_IN_MEMCPY_CHK:
2917 case BUILT_IN_MEMPCPY_CHK:
2918 case BUILT_IN_MEMMOVE_CHK:
2919 case BUILT_IN_MEMSET_CHK:
2920 return gimple_fold_builtin_memory_chk (gsi,
2921 gimple_call_arg (stmt, 0),
2922 gimple_call_arg (stmt, 1),
2923 gimple_call_arg (stmt, 2),
2924 gimple_call_arg (stmt, 3),
2925 fcode);
2926 case BUILT_IN_STPCPY:
2927 return gimple_fold_builtin_stpcpy (gsi);
2928 case BUILT_IN_STRCPY_CHK:
2929 case BUILT_IN_STPCPY_CHK:
2930 return gimple_fold_builtin_stxcpy_chk (gsi,
2931 gimple_call_arg (stmt, 0),
2932 gimple_call_arg (stmt, 1),
2933 gimple_call_arg (stmt, 2),
2934 fcode);
2935 case BUILT_IN_STRNCPY_CHK:
2936 case BUILT_IN_STPNCPY_CHK:
2937 return gimple_fold_builtin_stxncpy_chk (gsi,
2938 gimple_call_arg (stmt, 0),
2939 gimple_call_arg (stmt, 1),
2940 gimple_call_arg (stmt, 2),
2941 gimple_call_arg (stmt, 3),
2942 fcode);
2943 case BUILT_IN_SNPRINTF_CHK:
2944 case BUILT_IN_VSNPRINTF_CHK:
2945 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2946 case BUILT_IN_SNPRINTF:
2947 return gimple_fold_builtin_snprintf (gsi);
2948 case BUILT_IN_SPRINTF:
2949 return gimple_fold_builtin_sprintf (gsi);
2950 case BUILT_IN_FPRINTF:
2951 case BUILT_IN_FPRINTF_UNLOCKED:
2952 case BUILT_IN_VFPRINTF:
2953 if (n == 2 || n == 3)
2954 return gimple_fold_builtin_fprintf (gsi,
2955 gimple_call_arg (stmt, 0),
2956 gimple_call_arg (stmt, 1),
2957 n == 3
2958 ? gimple_call_arg (stmt, 2)
2959 : NULL_TREE,
2960 fcode);
2961 break;
/* The _chk variants carry an extra flag argument at index 1, so the
   format string and optional data argument sit one slot later.  */
2962 case BUILT_IN_FPRINTF_CHK:
2963 case BUILT_IN_VFPRINTF_CHK:
2964 if (n == 3 || n == 4)
2965 return gimple_fold_builtin_fprintf (gsi,
2966 gimple_call_arg (stmt, 0),
2967 gimple_call_arg (stmt, 2),
2968 n == 4
2969 ? gimple_call_arg (stmt, 3)
2970 : NULL_TREE,
2971 fcode);
2972 break;
2973 case BUILT_IN_PRINTF:
2974 case BUILT_IN_PRINTF_UNLOCKED:
2975 case BUILT_IN_VPRINTF:
2976 if (n == 1 || n == 2)
2977 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2978 n == 2
2979 ? gimple_call_arg (stmt, 1)
2980 : NULL_TREE, fcode);
2981 break;
/* Falls through to the (empty) default when N doesn't match, which is
   harmless — control then reaches the generic folder below.  */
2982 case BUILT_IN_PRINTF_CHK:
2983 case BUILT_IN_VPRINTF_CHK:
2984 if (n == 2 || n == 3)
2985 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2986 n == 3
2987 ? gimple_call_arg (stmt, 2)
2988 : NULL_TREE, fcode);
2989 default:;
2992 /* Try the generic builtin folder. */
2993 bool ignore = (gimple_call_lhs (stmt) == NULL);
2994 tree result = fold_call_stmt (stmt, ignore);
2995 if (result)
2997 if (ignore)
2998 STRIP_NOPS (result);
2999 else
3000 result = fold_convert (gimple_call_return_type (stmt), result);
3001 if (!update_call_from_tree (gsi, result))
3002 gimplify_and_update_call_from_tree (gsi, result);
3003 return true;
3006 return false;
3009 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3010 doesn't fit into TYPE. The test for overflow should be regardless of
3011 -fwrapv, and even for unsigned types. */
3013 bool
3014 arith_overflowed_p (enum tree_code code, const_tree type,
3015 const_tree arg0, const_tree arg1)
3017 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3018 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3019 widest2_int_cst;
3020 widest2_int warg0 = widest2_int_cst (arg0);
3021 widest2_int warg1 = widest2_int_cst (arg1);
3022 widest2_int wres;
3023 switch (code)
3025 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3026 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3027 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3028 default: gcc_unreachable ();
3030 signop sign = TYPE_SIGN (type);
3031 if (sign == UNSIGNED && wi::neg_p (wres))
3032 return true;
3033 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3036 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3037 The statement may be replaced by another statement, e.g., if the call
3038 simplifies to a constant value. Return true if any changes were made.
3039 It is assumed that the operands have been previously folded. */
3041 static bool
3042 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3044 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3045 tree callee;
3046 bool changed = false;
3047 unsigned i;
3049 /* Fold *& in call arguments. */
3050 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3051 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3053 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3054 if (tmp)
3056 gimple_call_set_arg (stmt, i, tmp);
3057 changed = true;
3061 /* Check for virtual calls that became direct calls. */
3062 callee = gimple_call_fn (stmt);
3063 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3065 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3067 if (dump_file && virtual_method_call_p (callee)
3068 && !possible_polymorphic_call_target_p
3069 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3070 (OBJ_TYPE_REF_EXPR (callee)))))
3072 fprintf (dump_file,
3073 "Type inheritance inconsistent devirtualization of ");
3074 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3075 fprintf (dump_file, " to ");
3076 print_generic_expr (dump_file, callee, TDF_SLIM);
3077 fprintf (dump_file, "\n");
3080 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3081 changed = true;
3083 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3085 bool final;
3086 vec <cgraph_node *>targets
3087 = possible_polymorphic_call_targets (callee, stmt, &final);
3088 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3090 tree lhs = gimple_call_lhs (stmt);
3091 if (dump_enabled_p ())
3093 location_t loc = gimple_location_safe (stmt);
3094 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3095 "folding virtual function call to %s\n",
3096 targets.length () == 1
3097 ? targets[0]->name ()
3098 : "__builtin_unreachable");
3100 if (targets.length () == 1)
3102 gimple_call_set_fndecl (stmt, targets[0]->decl);
3103 changed = true;
3104 /* If the call becomes noreturn, remove the lhs. */
3105 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3107 if (TREE_CODE (lhs) == SSA_NAME)
3109 tree var = create_tmp_var (TREE_TYPE (lhs));
3110 tree def = get_or_create_ssa_default_def (cfun, var);
3111 gimple new_stmt = gimple_build_assign (lhs, def);
3112 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3114 gimple_call_set_lhs (stmt, NULL_TREE);
3117 else
3119 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3120 gimple new_stmt = gimple_build_call (fndecl, 0);
3121 gimple_set_location (new_stmt, gimple_location (stmt));
3122 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3124 tree var = create_tmp_var (TREE_TYPE (lhs));
3125 tree def = get_or_create_ssa_default_def (cfun, var);
3127 /* To satisfy condition for
3128 cgraph_update_edges_for_call_stmt_node,
3129 we need to preserve GIMPLE_CALL statement
3130 at position of GSI iterator. */
3131 update_call_from_tree (gsi, def);
3132 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3134 else
3136 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3137 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3138 gsi_replace (gsi, new_stmt, false);
3140 return true;
3146 /* Check for indirect calls that became direct calls, and then
3147 no longer require a static chain. */
3148 if (gimple_call_chain (stmt))
3150 tree fn = gimple_call_fndecl (stmt);
3151 if (fn && !DECL_STATIC_CHAIN (fn))
3153 gimple_call_set_chain (stmt, NULL);
3154 changed = true;
3156 else
3158 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3159 if (tmp)
3161 gimple_call_set_chain (stmt, tmp);
3162 changed = true;
3167 if (inplace)
3168 return changed;
3170 /* Check for builtins that CCP can handle using information not
3171 available in the generic fold routines. */
3172 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3174 if (gimple_fold_builtin (gsi))
3175 changed = true;
3177 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3179 changed |= targetm.gimple_fold_builtin (gsi);
3181 else if (gimple_call_internal_p (stmt))
3183 enum tree_code subcode = ERROR_MARK;
3184 tree result = NULL_TREE;
3185 bool cplx_result = false;
3186 tree overflow = NULL_TREE;
3187 switch (gimple_call_internal_fn (stmt))
3189 case IFN_BUILTIN_EXPECT:
3190 result = fold_builtin_expect (gimple_location (stmt),
3191 gimple_call_arg (stmt, 0),
3192 gimple_call_arg (stmt, 1),
3193 gimple_call_arg (stmt, 2));
3194 break;
3195 case IFN_UBSAN_OBJECT_SIZE:
3196 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3197 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3198 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3199 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3200 gimple_call_arg (stmt, 2))))
3202 gsi_replace (gsi, gimple_build_nop (), true);
3203 unlink_stmt_vdef (stmt);
3204 release_defs (stmt);
3205 return true;
3207 break;
3208 case IFN_UBSAN_CHECK_ADD:
3209 subcode = PLUS_EXPR;
3210 break;
3211 case IFN_UBSAN_CHECK_SUB:
3212 subcode = MINUS_EXPR;
3213 break;
3214 case IFN_UBSAN_CHECK_MUL:
3215 subcode = MULT_EXPR;
3216 break;
3217 case IFN_ADD_OVERFLOW:
3218 subcode = PLUS_EXPR;
3219 cplx_result = true;
3220 break;
3221 case IFN_SUB_OVERFLOW:
3222 subcode = MINUS_EXPR;
3223 cplx_result = true;
3224 break;
3225 case IFN_MUL_OVERFLOW:
3226 subcode = MULT_EXPR;
3227 cplx_result = true;
3228 break;
3229 default:
3230 break;
3232 if (subcode != ERROR_MARK)
3234 tree arg0 = gimple_call_arg (stmt, 0);
3235 tree arg1 = gimple_call_arg (stmt, 1);
3236 tree type = TREE_TYPE (arg0);
3237 if (cplx_result)
3239 tree lhs = gimple_call_lhs (stmt);
3240 if (lhs == NULL_TREE)
3241 type = NULL_TREE;
3242 else
3243 type = TREE_TYPE (TREE_TYPE (lhs));
3245 if (type == NULL_TREE)
3247 /* x = y + 0; x = y - 0; x = y * 0; */
3248 else if (integer_zerop (arg1))
3249 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3250 /* x = 0 + y; x = 0 * y; */
3251 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3252 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3253 /* x = y - y; */
3254 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3255 result = integer_zero_node;
3256 /* x = y * 1; x = 1 * y; */
3257 else if (subcode == MULT_EXPR && integer_onep (arg1))
3258 result = arg0;
3259 else if (subcode == MULT_EXPR && integer_onep (arg0))
3260 result = arg1;
3261 else if (TREE_CODE (arg0) == INTEGER_CST
3262 && TREE_CODE (arg1) == INTEGER_CST)
3264 if (cplx_result)
3265 result = int_const_binop (subcode, fold_convert (type, arg0),
3266 fold_convert (type, arg1));
3267 else
3268 result = int_const_binop (subcode, arg0, arg1);
3269 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3271 if (cplx_result)
3272 overflow = build_one_cst (type);
3273 else
3274 result = NULL_TREE;
3277 if (result)
3279 if (result == integer_zero_node)
3280 result = build_zero_cst (type);
3281 else if (cplx_result && TREE_TYPE (result) != type)
3283 if (TREE_CODE (result) == INTEGER_CST)
3285 if (arith_overflowed_p (PLUS_EXPR, type, result,
3286 integer_zero_node))
3287 overflow = build_one_cst (type);
3289 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3290 && TYPE_UNSIGNED (type))
3291 || (TYPE_PRECISION (type)
3292 < (TYPE_PRECISION (TREE_TYPE (result))
3293 + (TYPE_UNSIGNED (TREE_TYPE (result))
3294 && !TYPE_UNSIGNED (type)))))
3295 result = NULL_TREE;
3296 if (result)
3297 result = fold_convert (type, result);
3302 if (result)
3304 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3305 result = drop_tree_overflow (result);
3306 if (cplx_result)
3308 if (overflow == NULL_TREE)
3309 overflow = build_zero_cst (TREE_TYPE (result));
3310 tree ctype = build_complex_type (TREE_TYPE (result));
3311 if (TREE_CODE (result) == INTEGER_CST
3312 && TREE_CODE (overflow) == INTEGER_CST)
3313 result = build_complex (ctype, result, overflow);
3314 else
3315 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3316 ctype, result, overflow);
3318 if (!update_call_from_tree (gsi, result))
3319 gimplify_and_update_call_from_tree (gsi, result);
3320 changed = true;
3324 return changed;
3328 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3329 gimple_simplify.
3331 Replaces *GSI with the simplification result in RCODE and OPS
3332 and the associated statements in *SEQ. Does the replacement
3333 according to INPLACE and returns true if the operation succeeded. */
3335 static bool
3336 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3337 code_helper rcode, tree *ops,
3338 gimple_seq *seq, bool inplace)
3340 gimple stmt = gsi_stmt (*gsi);
3342 /* Play safe and do not allow abnormals to be mentioned in
3343 newly created statements. See also maybe_push_res_to_seq. */
3344 if ((TREE_CODE (ops[0]) == SSA_NAME
3345 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
3346 || (ops[1]
3347 && TREE_CODE (ops[1]) == SSA_NAME
3348 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
3349 || (ops[2]
3350 && TREE_CODE (ops[2]) == SSA_NAME
3351 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
3352 return false;
/* GIMPLE_COND: the simplification must be re-expressed as a valid
   condition (comparison, SSA name test, or constant).  */
3354 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3356 gcc_assert (rcode.is_tree_code ());
3357 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3358 /* GIMPLE_CONDs condition may not throw. */
3359 && (!flag_exceptions
3360 || !cfun->can_throw_non_call_exceptions
3361 || !operation_could_trap_p (rcode,
3362 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3363 false, NULL_TREE)))
3364 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3365 else if (rcode == SSA_NAME)
3366 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3367 build_zero_cst (TREE_TYPE (ops[0])));
3368 else if (rcode == INTEGER_CST)
3370 if (integer_zerop (ops[0]))
3371 gimple_cond_make_false (cond_stmt);
3372 else
3373 gimple_cond_make_true (cond_stmt);
/* Otherwise materialize the result into a new SSA name via *SEQ and
   compare it against zero -- only possible when not folding in place.  */
3375 else if (!inplace)
3377 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3378 ops, seq);
3379 if (!res)
3380 return false;
3381 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3382 build_zero_cst (TREE_TYPE (res)));
3384 else
3385 return false;
3386 if (dump_file && (dump_flags & TDF_DETAILS))
3388 fprintf (dump_file, "gimple_simplified to ");
3389 if (!gimple_seq_empty_p (*seq))
3390 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3391 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3392 0, TDF_SLIM);
3394 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3395 return true;
/* GIMPLE_ASSIGN with a tree-code result: rewrite the RHS in place,
   but only when not growing the number of operands in inplace mode.  */
3397 else if (is_gimple_assign (stmt)
3398 && rcode.is_tree_code ())
3400 if (!inplace
3401 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3403 maybe_build_generic_op (rcode,
3404 TREE_TYPE (gimple_assign_lhs (stmt)),
3405 &ops[0], ops[1], ops[2]);
3406 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3407 if (dump_file && (dump_flags & TDF_DETAILS))
3409 fprintf (dump_file, "gimple_simplified to ");
3410 if (!gimple_seq_empty_p (*seq))
3411 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3412 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3413 0, TDF_SLIM);
3415 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3416 return true;
/* Any other statement with an LHS (e.g. a call): build a sequence
   computing the result into the LHS and splice it in, preserving
   virtual operands.  */
3419 else if (!inplace)
3421 if (gimple_has_lhs (stmt))
3423 tree lhs = gimple_get_lhs (stmt);
3424 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3425 ops, seq, lhs))
3426 return false;
3427 if (dump_file && (dump_flags & TDF_DETAILS))
3429 fprintf (dump_file, "gimple_simplified to ");
3430 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3432 gsi_replace_with_seq_vops (gsi, *seq);
3433 return true;
3435 else
3436 gcc_unreachable ();
3439 return false;
3442 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3444 static bool
3445 maybe_canonicalize_mem_ref_addr (tree *t)
3447 bool res = false;
/* Strip an outer address-of and any handled components so *T ends up
   pointing at the innermost [TARGET_]MEM_REF or decl.  */
3449 if (TREE_CODE (*t) == ADDR_EXPR)
3450 t = &TREE_OPERAND (*t, 0);
3452 while (handled_component_p (*t))
3453 t = &TREE_OPERAND (*t, 0);
3455 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3456 of invariant addresses into a SSA name MEM_REF address. */
3457 if (TREE_CODE (*t) == MEM_REF
3458 || TREE_CODE (*t) == TARGET_MEM_REF)
3460 tree addr = TREE_OPERAND (*t, 0);
3461 if (TREE_CODE (addr) == ADDR_EXPR
3462 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3463 || handled_component_p (TREE_OPERAND (addr, 0))))
3465 tree base;
3466 HOST_WIDE_INT coffset;
/* Fold the component reference into the MEM_REF's constant offset.
   The address is invariant here, so a NULL base is impossible.  */
3467 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3468 &coffset);
3469 if (!base)
3470 gcc_unreachable ();
3472 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3473 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3474 TREE_OPERAND (*t, 1),
3475 size_int (coffset));
3476 res = true;
3478 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3479 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3482 /* Canonicalize back MEM_REFs to plain reference trees if the object
3483 accessed is a decl that has the same access semantics as the MEM_REF. */
3484 if (TREE_CODE (*t) == MEM_REF
3485 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3486 && integer_zerop (TREE_OPERAND (*t, 1))
3487 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3489 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3490 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3491 if (/* Same volatile qualification. */
3492 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3493 /* Same TBAA behavior with -fstrict-aliasing. */
3494 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3495 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3496 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3497 /* Same alignment. */
3498 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3499 /* We have to look out here to not drop a required conversion
3500 from the rhs to the lhs if *t appears on the lhs or vice-versa
3501 if it appears on the rhs. Thus require strict type
3502 compatibility. */
3503 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3505 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3506 res = true;
3510 /* Canonicalize TARGET_MEM_REF in particular with respect to
3511 the indexes becoming constant. */
3512 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3514 tree tem = maybe_fold_tmr (*t);
3515 if (tem)
3517 *t = tem;
3518 res = true;
3522 return res;
3525 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3526 distinguishes both cases. */
3528 static bool
3529 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3531 bool changed = false;
3532 gimple stmt = gsi_stmt (*gsi);
3533 unsigned i;
3535 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3536 after propagation.
3537 ??? This shouldn't be done in generic folding but in the
3538 propagation helpers which also know whether an address was
3539 propagated. */
3540 switch (gimple_code (stmt))
3542 case GIMPLE_ASSIGN:
3543 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3545 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3546 if ((REFERENCE_CLASS_P (*rhs)
3547 || TREE_CODE (*rhs) == ADDR_EXPR)
3548 && maybe_canonicalize_mem_ref_addr (rhs))
3549 changed = true;
3550 tree *lhs = gimple_assign_lhs_ptr (stmt);
3551 if (REFERENCE_CLASS_P (*lhs)
3552 && maybe_canonicalize_mem_ref_addr (lhs))
3553 changed = true;
3555 break;
3556 case GIMPLE_CALL:
/* Canonicalize every argument and the call LHS.  */
3558 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3560 tree *arg = gimple_call_arg_ptr (stmt, i);
3561 if (REFERENCE_CLASS_P (*arg)
3562 && maybe_canonicalize_mem_ref_addr (arg))
3563 changed = true;
3565 tree *lhs = gimple_call_lhs_ptr (stmt);
3566 if (*lhs
3567 && REFERENCE_CLASS_P (*lhs)
3568 && maybe_canonicalize_mem_ref_addr (lhs))
3569 changed = true;
3570 break;
3572 case GIMPLE_ASM:
/* Canonicalize asm output and input operands.  */
3574 gasm *asm_stmt = as_a <gasm *> (stmt);
3575 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3577 tree link = gimple_asm_output_op (asm_stmt, i);
3578 tree op = TREE_VALUE (link);
3579 if (REFERENCE_CLASS_P (op)
3580 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3581 changed = true;
3583 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3585 tree link = gimple_asm_input_op (asm_stmt, i)
3586 tree op = TREE_VALUE (link);
3587 if ((REFERENCE_CLASS_P (op)
3588 || TREE_CODE (op) == ADDR_EXPR)
3589 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3590 changed = true;
3593 break;
3594 case GIMPLE_DEBUG:
3595 if (gimple_debug_bind_p (stmt))
3597 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3598 if (*val
3599 && (REFERENCE_CLASS_P (*val)
3600 || TREE_CODE (*val) == ADDR_EXPR)
3601 && maybe_canonicalize_mem_ref_addr (val))
3602 changed = true;
3604 break;
3605 default:;
3608 /* Dispatch to pattern-based folding. */
3609 if (!inplace
3610 || is_gimple_assign (stmt)
3611 || gimple_code (stmt) == GIMPLE_COND)
3613 gimple_seq seq = NULL;
3614 code_helper rcode;
3615 tree ops[3] = {};
3616 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
3618 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3619 changed = true;
3620 else
3621 gimple_seq_discard (seq);
/* Pattern folding may have replaced the statement; re-fetch it.  */
3625 stmt = gsi_stmt (*gsi);
3627 /* Fold the main computation performed by the statement. */
3628 switch (gimple_code (stmt))
3630 case GIMPLE_ASSIGN:
3632 unsigned old_num_ops = gimple_num_ops (stmt);
3633 enum tree_code subcode = gimple_assign_rhs_code (stmt);
3634 tree lhs = gimple_assign_lhs (stmt);
3635 tree new_rhs;
3636 /* First canonicalize operand order. This avoids building new
3637 trees if this is the only thing fold would later do. */
3638 if ((commutative_tree_code (subcode)
3639 || commutative_ternary_tree_code (subcode))
3640 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
3641 gimple_assign_rhs2 (stmt), false))
3643 tree tem = gimple_assign_rhs1 (stmt);
3644 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
3645 gimple_assign_set_rhs2 (stmt, tem);
3646 changed = true;
3648 new_rhs = fold_gimple_assign (gsi);
3649 if (new_rhs
3650 && !useless_type_conversion_p (TREE_TYPE (lhs),
3651 TREE_TYPE (new_rhs)))
3652 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
/* In inplace mode only accept a RHS that does not grow the
   statement's operand count.  */
3653 if (new_rhs
3654 && (!inplace
3655 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3657 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3658 changed = true;
3660 break;
3663 case GIMPLE_COND:
3664 changed |= fold_gimple_cond (as_a <gcond *> (stmt));
3665 break;
3667 case GIMPLE_CALL:
3668 changed |= gimple_fold_call (gsi, inplace);
3669 break;
3671 case GIMPLE_ASM:
3672 /* Fold *& in asm operands. */
3674 gasm *asm_stmt = as_a <gasm *> (stmt);
3675 size_t noutputs;
3676 const char **oconstraints;
3677 const char *constraint;
3678 bool allows_mem, allows_reg;
3680 noutputs = gimple_asm_noutputs (asm_stmt);
3681 oconstraints = XALLOCAVEC (const char *, noutputs);
3683 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3685 tree link = gimple_asm_output_op (asm_stmt, i);
3686 tree op = TREE_VALUE (link);
3687 oconstraints[i]
3688 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3689 if (REFERENCE_CLASS_P (op)
3690 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3692 TREE_VALUE (link) = op;
3693 changed = true;
3696 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3698 tree link = gimple_asm_input_op (asm_stmt, i);
3699 tree op = TREE_VALUE (link);
3700 constraint
3701 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3702 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3703 oconstraints, &allows_mem, &allows_reg);
3704 if (REFERENCE_CLASS_P (op)
3705 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3706 != NULL_TREE)
3708 TREE_VALUE (link) = op;
3709 changed = true;
3713 break;
3715 case GIMPLE_DEBUG:
3716 if (gimple_debug_bind_p (stmt))
3718 tree val = gimple_debug_bind_get_value (stmt);
3719 if (val
3720 && REFERENCE_CLASS_P (val))
3722 tree tem = maybe_fold_reference (val, false);
3723 if (tem)
3725 gimple_debug_bind_set_value (stmt, tem);
3726 changed = true;
3729 else if (val
3730 && TREE_CODE (val) == ADDR_EXPR)
3732 tree ref = TREE_OPERAND (val, 0);
3733 tree tem = maybe_fold_reference (ref, false);
3734 if (tem)
3736 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3737 gimple_debug_bind_set_value (stmt, tem);
3738 changed = true;
3742 break;
3744 default:;
/* Folding above may again have replaced the statement; re-fetch it.  */
3747 stmt = gsi_stmt (*gsi);
3749 /* Fold *& on the lhs. */
3750 if (gimple_has_lhs (stmt))
3752 tree lhs = gimple_get_lhs (stmt);
3753 if (lhs && REFERENCE_CLASS_P (lhs))
3755 tree new_lhs = maybe_fold_reference (lhs, true);
3756 if (new_lhs)
3758 gimple_set_lhs (stmt, new_lhs);
3759 changed = true;
3764 return changed;
3767 /* Valueziation callback that ends up not following SSA edges. */
3769 tree
3770 no_follow_ssa_edges (tree)
3772 return NULL_TREE;
3775 /* Valueization callback that ends up following single-use SSA edges only. */
3777 tree
3778 follow_single_use_edges (tree val)
3780 if (TREE_CODE (val) == SSA_NAME
3781 && !has_single_use (val))
3782 return NULL_TREE;
3783 return val;
3786 /* Fold the statement pointed to by GSI. In some cases, this function may
3787 replace the whole statement with a new one. Returns true iff folding
3788 makes any changes.
3789 The statement pointed to by GSI should be in valid gimple form but may
3790 be in unfolded state as resulting from for example constant propagation
3791 which can produce *&x = 0. */
3793 bool
3794 fold_stmt (gimple_stmt_iterator *gsi)
3796 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3799 bool
3800 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3802 return fold_stmt_1 (gsi, false, valueize);
3805 /* Perform the minimal folding on statement *GSI. Only operations like
3806 *&x created by constant propagation are handled. The statement cannot
3807 be replaced with a new one. Return true if the statement was
3808 changed, false otherwise.
3809 The statement *GSI should be in valid gimple form but may
3810 be in unfolded state as resulting from for example constant propagation
3811 which can produce *&x = 0. */
3813 bool
3814 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3816 gimple stmt = gsi_stmt (*gsi);
3817 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3818 gcc_assert (gsi_stmt (*gsi) == stmt);
3819 return changed;
3822 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3823 if EXPR is null or we don't know how.
3824 If non-null, the result always has boolean type. */
3826 static tree
3827 canonicalize_bool (tree expr, bool invert)
3829 if (!expr)
3830 return NULL_TREE;
3831 else if (invert)
3833 if (integer_nonzerop (expr))
3834 return boolean_false_node;
3835 else if (integer_zerop (expr))
3836 return boolean_true_node;
3837 else if (TREE_CODE (expr) == SSA_NAME)
3838 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3839 build_int_cst (TREE_TYPE (expr), 0));
3840 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3841 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3842 boolean_type_node,
3843 TREE_OPERAND (expr, 0),
3844 TREE_OPERAND (expr, 1));
3845 else
3846 return NULL_TREE;
3848 else
3850 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3851 return expr;
3852 if (integer_nonzerop (expr))
3853 return boolean_true_node;
3854 else if (integer_zerop (expr))
3855 return boolean_false_node;
3856 else if (TREE_CODE (expr) == SSA_NAME)
3857 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3858 build_int_cst (TREE_TYPE (expr), 0));
3859 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3860 return fold_build2 (TREE_CODE (expr),
3861 boolean_type_node,
3862 TREE_OPERAND (expr, 0),
3863 TREE_OPERAND (expr, 1));
3864 else
3865 return NULL_TREE;
3869 /* Check to see if a boolean expression EXPR is logically equivalent to the
3870 comparison (OP1 CODE OP2). Check for various identities involving
3871 SSA_NAMEs. */
3873 static bool
3874 same_bool_comparison_p (const_tree expr, enum tree_code code,
3875 const_tree op1, const_tree op2)
3877 gimple s;
3879 /* The obvious case. */
3880 if (TREE_CODE (expr) == code
3881 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3882 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0)
3883 return true;
3885 /* Check for comparing (name, name != 0) and the case where expr
3886 is an SSA_NAME with a definition matching the comparison. */
3887 if (TREE_CODE (expr) == SSA_NAME
3888 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
/* A boolean name compared against zero/nonzero is the name itself.  */
3890 if (operand_equal_p (expr, op1, 0))
3891 return ((code == NE_EXPR && integer_zerop (op2))
3892 || (code == EQ_EXPR && integer_nonzerop (op2)));
/* Otherwise look through the name's defining assignment.  */
3893 s = SSA_NAME_DEF_STMT (expr);
3894 if (is_gimple_assign (s)
3895 && gimple_assign_rhs_code (s) == code
3896 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3897 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3898 return true;
3901 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3902 of name is a comparison, recurse. */
3903 if (TREE_CODE (op1) == SSA_NAME
3904 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3906 s = SSA_NAME_DEF_STMT (op1);
3907 if (is_gimple_assign (s)
3908 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3910 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) or (name == nonzero) preserves the comparison;
   (name == 0) or (name != nonzero) inverts it.  */
3911 if ((c == NE_EXPR && integer_zerop (op2))
3912 || (c == EQ_EXPR && integer_nonzerop (op2)))
3913 return same_bool_comparison_p (expr, c,
3914 gimple_assign_rhs1 (s),
3915 gimple_assign_rhs2 (s));
3916 if ((c == EQ_EXPR && integer_zerop (op2))
3917 || (c == NE_EXPR && integer_nonzerop (op2)))
3918 return same_bool_comparison_p (expr,
3919 invert_tree_comparison (c, false),
3920 gimple_assign_rhs1 (s),
3921 gimple_assign_rhs2 (s));
3924 return false;
3927 /* Check to see if two boolean expressions OP1 and OP2 are logically
3928 equivalent. */
3930 static bool
3931 same_bool_result_p (const_tree op1, const_tree op2)
3933 /* Simple cases first. */
3934 if (operand_equal_p (op1, op2, 0))
3935 return true;
3937 /* Check the cases where at least one of the operands is a comparison.
3938 These are a bit smarter than operand_equal_p in that they apply some
3939 identifies on SSA_NAMEs. */
3940 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
3941 && same_bool_comparison_p (op1, TREE_CODE (op2),
3942 TREE_OPERAND (op2, 0),
3943 TREE_OPERAND (op2, 1)))
3944 return true;
3945 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
3946 && same_bool_comparison_p (op2, TREE_CODE (op1),
3947 TREE_OPERAND (op1, 0),
3948 TREE_OPERAND (op1, 1)))
3949 return true;
3951 /* Default case. */
3952 return false;
3955 /* Forward declarations for some mutually recursive functions. */
/* The and_* and or_* helpers call into each other when applying
   De Morgan's laws to inverted operands.  */
3957 static tree
3958 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3959 enum tree_code code2, tree op2a, tree op2b);
3960 static tree
3961 and_var_with_comparison (tree var, bool invert,
3962 enum tree_code code2, tree op2a, tree op2b);
3963 static tree
3964 and_var_with_comparison_1 (gimple stmt,
3965 enum tree_code code2, tree op2a, tree op2b);
3966 static tree
3967 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3968 enum tree_code code2, tree op2a, tree op2b);
3969 static tree
3970 or_var_with_comparison (tree var, bool invert,
3971 enum tree_code code2, tree op2a, tree op2b);
3972 static tree
3973 or_var_with_comparison_1 (gimple stmt,
3974 enum tree_code code2, tree op2a, tree op2b);
3976 /* Helper function for and_comparisons_1: try to simplify the AND of the
3977 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3978 If INVERT is true, invert the value of the VAR before doing the AND.
3979 Return NULL_EXPR if we can't simplify this to a single expression. */
3981 static tree
3982 and_var_with_comparison (tree var, bool invert,
3983 enum tree_code code2, tree op2a, tree op2b)
3985 tree t;
3986 gimple stmt = SSA_NAME_DEF_STMT (var);
3988 /* We can only deal with variables whose definitions are assignments. */
3989 if (!is_gimple_assign (stmt))
3990 return NULL_TREE;
3992 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3993 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3994 Then we only have to consider the simpler non-inverted cases. */
3995 if (invert)
3996 t = or_var_with_comparison_1 (stmt,
3997 invert_tree_comparison (code2, false),
3998 op2a, op2b);
3999 else
4000 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4001 return canonicalize_bool (t, invert);
4004 /* Try to simplify the AND of the ssa variable defined by the assignment
4005 STMT with the comparison specified by (OP2A CODE2 OP2B).
4006 Return NULL_EXPR if we can't simplify this to a single expression. */
4008 static tree
4009 and_var_with_comparison_1 (gimple stmt,
4010 enum tree_code code2, tree op2a, tree op2b)
4012 tree var = gimple_assign_lhs (stmt);
4013 tree true_test_var = NULL_TREE;
4014 tree false_test_var = NULL_TREE;
4015 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4017 /* Check for identities like (var AND (var == 0)) => false. */
4018 if (TREE_CODE (op2a) == SSA_NAME
4019 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) / (op2a == nonzero) tests op2a for truth;
   (op2a == 0) / (op2a != nonzero) tests it for falsehood.  */
4021 if ((code2 == NE_EXPR && integer_zerop (op2b))
4022 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4024 true_test_var = op2a;
4025 if (var == true_test_var)
4026 return var;
4028 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4029 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4031 false_test_var = op2a;
4032 if (var == false_test_var)
4033 return boolean_false_node;
4037 /* If the definition is a comparison, recurse on it. */
4038 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4040 tree t = and_comparisons_1 (innercode,
4041 gimple_assign_rhs1 (stmt),
4042 gimple_assign_rhs2 (stmt),
4043 code2,
4044 op2a,
4045 op2b);
4046 if (t)
4047 return t;
4050 /* If the definition is an AND or OR expression, we may be able to
4051 simplify by reassociating. */
4052 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4053 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4055 tree inner1 = gimple_assign_rhs1 (stmt);
4056 tree inner2 = gimple_assign_rhs2 (stmt);
4057 gimple s;
4058 tree t;
4059 tree partial = NULL_TREE;
4060 bool is_and = (innercode == BIT_AND_EXPR);
4062 /* Check for boolean identities that don't require recursive examination
4063 of inner1/inner2:
4064 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4065 inner1 AND (inner1 OR inner2) => inner1
4066 !inner1 AND (inner1 AND inner2) => false
4067 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4068 Likewise for similar cases involving inner2. */
4069 if (inner1 == true_test_var)
4070 return (is_and ? var : inner1);
4071 else if (inner2 == true_test_var)
4072 return (is_and ? var : inner2);
4073 else if (inner1 == false_test_var)
4074 return (is_and
4075 ? boolean_false_node
4076 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4077 else if (inner2 == false_test_var)
4078 return (is_and
4079 ? boolean_false_node
4080 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4082 /* Next, redistribute/reassociate the AND across the inner tests.
4083 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4084 if (TREE_CODE (inner1) == SSA_NAME
4085 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4086 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4087 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4088 gimple_assign_rhs1 (s),
4089 gimple_assign_rhs2 (s),
4090 code2, op2a, op2b)))
4092 /* Handle the AND case, where we are reassociating:
4093 (inner1 AND inner2) AND (op2a code2 op2b)
4094 => (t AND inner2)
4095 If the partial result t is a constant, we win. Otherwise
4096 continue on to try reassociating with the other inner test. */
4097 if (is_and)
4099 if (integer_onep (t))
4100 return inner2;
4101 else if (integer_zerop (t))
4102 return boolean_false_node;
4105 /* Handle the OR case, where we are redistributing:
4106 (inner1 OR inner2) AND (op2a code2 op2b)
4107 => (t OR (inner2 AND (op2a code2 op2b))) */
4108 else if (integer_onep (t))
4109 return boolean_true_node;
4111 /* Save partial result for later. */
4112 partial = t;
4115 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4116 if (TREE_CODE (inner2) == SSA_NAME
4117 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4118 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4119 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4120 gimple_assign_rhs1 (s),
4121 gimple_assign_rhs2 (s),
4122 code2, op2a, op2b)))
4124 /* Handle the AND case, where we are reassociating:
4125 (inner1 AND inner2) AND (op2a code2 op2b)
4126 => (inner1 AND t) */
4127 if (is_and)
4129 if (integer_onep (t))
4130 return inner1;
4131 else if (integer_zerop (t))
4132 return boolean_false_node;
4133 /* If both are the same, we can apply the identity
4134 (x AND x) == x. */
4135 else if (partial && same_bool_result_p (t, partial))
4136 return t;
4139 /* Handle the OR case. where we are redistributing:
4140 (inner1 OR inner2) AND (op2a code2 op2b)
4141 => (t OR (inner1 AND (op2a code2 op2b)))
4142 => (t OR partial) */
4143 else
4145 if (integer_onep (t))
4146 return boolean_true_node;
4147 else if (partial)
4149 /* We already got a simplification for the other
4150 operand to the redistributed OR expression. The
4151 interesting case is when at least one is false.
4152 Or, if both are the same, we can apply the identity
4153 (x OR x) == x. */
4154 if (integer_zerop (partial))
4155 return t;
4156 else if (integer_zerop (t))
4157 return partial;
4158 else if (same_bool_result_p (t, partial))
4159 return t;
4164 return NULL_TREE;
4167 /* Try to simplify the AND of two comparisons defined by
4168 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4169 If this can be done without constructing an intermediate value,
4170 return the resulting tree; otherwise NULL_TREE is returned.
4171 This function is deliberately asymmetric as it recurses on SSA_DEFs
4172 in the first comparison but not the second. */
4174 static tree
4175 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4176 enum tree_code code2, tree op2a, tree op2b)
4178 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4180 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4181 if (operand_equal_p (op1a, op2a, 0)
4182 && operand_equal_p (op1b, op2b, 0))
4184 /* Result will be either NULL_TREE, or a combined comparison. */
4185 tree t = combine_comparisons (UNKNOWN_LOCATION,
4186 TRUTH_ANDIF_EXPR, code1, code2,
4187 truth_type, op1a, op1b);
4188 if (t)
4189 return t;
4192 /* Likewise the swapped case of the above. */
4193 if (operand_equal_p (op1a, op2b, 0)
4194 && operand_equal_p (op1b, op2a, 0))
4196 /* Result will be either NULL_TREE, or a combined comparison. */
4197 tree t = combine_comparisons (UNKNOWN_LOCATION,
4198 TRUTH_ANDIF_EXPR, code1,
4199 swap_tree_comparison (code2),
4200 truth_type, op1a, op1b);
4201 if (t)
4202 return t;
4205 /* If both comparisons are of the same value against constants, we might
4206 be able to merge them. */
4207 if (operand_equal_p (op1a, op2a, 0)
4208 && TREE_CODE (op1b) == INTEGER_CST
4209 && TREE_CODE (op2b) == INTEGER_CST)
4211 int cmp = tree_int_cst_compare (op1b, op2b);
4213 /* If we have (op1a == op1b), we should either be able to
4214 return that or FALSE, depending on whether the constant op1b
4215 also satisfies the other comparison against op2b. */
4216 if (code1 == EQ_EXPR)
4218 bool done = true;
4219 bool val;
4220 switch (code2)
4222 case EQ_EXPR: val = (cmp == 0); break;
4223 case NE_EXPR: val = (cmp != 0); break;
4224 case LT_EXPR: val = (cmp < 0); break;
4225 case GT_EXPR: val = (cmp > 0); break;
4226 case LE_EXPR: val = (cmp <= 0); break;
4227 case GE_EXPR: val = (cmp >= 0); break;
4228 default: done = false;
4230 if (done)
4232 if (val)
4233 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4234 else
4235 return boolean_false_node;
4238 /* Likewise if the second comparison is an == comparison. */
4239 else if (code2 == EQ_EXPR)
4241 bool done = true;
4242 bool val;
4243 switch (code1)
4245 case EQ_EXPR: val = (cmp == 0); break;
4246 case NE_EXPR: val = (cmp != 0); break;
4247 case LT_EXPR: val = (cmp > 0); break;
4248 case GT_EXPR: val = (cmp < 0); break;
4249 case LE_EXPR: val = (cmp >= 0); break;
4250 case GE_EXPR: val = (cmp <= 0); break;
4251 default: done = false;
4253 if (done)
4255 if (val)
4256 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4257 else
4258 return boolean_false_node;
4262 /* Same business with inequality tests. */
4263 else if (code1 == NE_EXPR)
4265 bool val;
4266 switch (code2)
4268 case EQ_EXPR: val = (cmp != 0); break;
4269 case NE_EXPR: val = (cmp == 0); break;
4270 case LT_EXPR: val = (cmp >= 0); break;
4271 case GT_EXPR: val = (cmp <= 0); break;
4272 case LE_EXPR: val = (cmp > 0); break;
4273 case GE_EXPR: val = (cmp < 0); break;
4274 default:
4275 val = false;
4277 if (val)
4278 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4280 else if (code2 == NE_EXPR)
4282 bool val;
4283 switch (code1)
4285 case EQ_EXPR: val = (cmp == 0); break;
4286 case NE_EXPR: val = (cmp != 0); break;
4287 case LT_EXPR: val = (cmp <= 0); break;
4288 case GT_EXPR: val = (cmp >= 0); break;
4289 case LE_EXPR: val = (cmp < 0); break;
4290 case GE_EXPR: val = (cmp > 0); break;
4291 default:
4292 val = false;
4294 if (val)
4295 return fold_build2 (code1, boolean_type_node, op1a, op1b);
 4298 /* Choose the more restrictive of two < or <= comparisons. */
4299 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4300 && (code2 == LT_EXPR || code2 == LE_EXPR))
4302 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4303 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4304 else
4305 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4308 /* Likewise chose the more restrictive of two > or >= comparisons. */
4309 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4310 && (code2 == GT_EXPR || code2 == GE_EXPR))
4312 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4313 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4314 else
4315 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4318 /* Check for singleton ranges. */
4319 else if (cmp == 0
4320 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4321 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4322 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4324 /* Check for disjoint ranges. */
4325 else if (cmp <= 0
4326 && (code1 == LT_EXPR || code1 == LE_EXPR)
4327 && (code2 == GT_EXPR || code2 == GE_EXPR))
4328 return boolean_false_node;
4329 else if (cmp >= 0
4330 && (code1 == GT_EXPR || code1 == GE_EXPR)
4331 && (code2 == LT_EXPR || code2 == LE_EXPR))
4332 return boolean_false_node;
4335 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4336 NAME's definition is a truth value. See if there are any simplifications
4337 that can be done against the NAME's definition. */
4338 if (TREE_CODE (op1a) == SSA_NAME
4339 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4340 && (integer_zerop (op1b) || integer_onep (op1b)))
4342 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4343 || (code1 == NE_EXPR && integer_onep (op1b)));
4344 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4345 switch (gimple_code (stmt))
4347 case GIMPLE_ASSIGN:
4348 /* Try to simplify by copy-propagating the definition. */
4349 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4351 case GIMPLE_PHI:
4352 /* If every argument to the PHI produces the same result when
4353 ANDed with the second comparison, we win.
4354 Do not do this unless the type is bool since we need a bool
4355 result here anyway. */
4356 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4358 tree result = NULL_TREE;
4359 unsigned i;
4360 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4362 tree arg = gimple_phi_arg_def (stmt, i);
4364 /* If this PHI has itself as an argument, ignore it.
4365 If all the other args produce the same result,
4366 we're still OK. */
4367 if (arg == gimple_phi_result (stmt))
4368 continue;
4369 else if (TREE_CODE (arg) == INTEGER_CST)
4371 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4373 if (!result)
4374 result = boolean_false_node;
4375 else if (!integer_zerop (result))
4376 return NULL_TREE;
4378 else if (!result)
4379 result = fold_build2 (code2, boolean_type_node,
4380 op2a, op2b);
4381 else if (!same_bool_comparison_p (result,
4382 code2, op2a, op2b))
4383 return NULL_TREE;
4385 else if (TREE_CODE (arg) == SSA_NAME
4386 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4388 tree temp;
4389 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4390 /* In simple cases we can look through PHI nodes,
4391 but we have to be careful with loops.
4392 See PR49073. */
4393 if (! dom_info_available_p (CDI_DOMINATORS)
4394 || gimple_bb (def_stmt) == gimple_bb (stmt)
4395 || dominated_by_p (CDI_DOMINATORS,
4396 gimple_bb (def_stmt),
4397 gimple_bb (stmt)))
4398 return NULL_TREE;
4399 temp = and_var_with_comparison (arg, invert, code2,
4400 op2a, op2b);
4401 if (!temp)
4402 return NULL_TREE;
4403 else if (!result)
4404 result = temp;
4405 else if (!same_bool_result_p (result, temp))
4406 return NULL_TREE;
4408 else
4409 return NULL_TREE;
4411 return result;
4414 default:
4415 break;
4418 return NULL_TREE;
4421 /* Try to simplify the AND of two comparisons, specified by
4422 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4423 If this can be simplified to a single expression (without requiring
4424 introducing more SSA variables to hold intermediate values),
4425 return the resulting tree. Otherwise return NULL_TREE.
4426 If the result expression is non-null, it has boolean type. */
4428 tree
4429 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4430 enum tree_code code2, tree op2a, tree op2b)
4432 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4433 if (t)
4434 return t;
4435 else
4436 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4439 /* Helper function for or_comparisons_1: try to simplify the OR of the
4440 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4441 If INVERT is true, invert the value of VAR before doing the OR.
4442 Return NULL_EXPR if we can't simplify this to a single expression. */
4444 static tree
4445 or_var_with_comparison (tree var, bool invert,
4446 enum tree_code code2, tree op2a, tree op2b)
4448 tree t;
4449 gimple stmt = SSA_NAME_DEF_STMT (var);
4451 /* We can only deal with variables whose definitions are assignments. */
4452 if (!is_gimple_assign (stmt))
4453 return NULL_TREE;
4455 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4456 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4457 Then we only have to consider the simpler non-inverted cases. */
4458 if (invert)
4459 t = and_var_with_comparison_1 (stmt,
4460 invert_tree_comparison (code2, false),
4461 op2a, op2b);
4462 else
4463 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4464 return canonicalize_bool (t, invert);
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* The second comparison tests that op2a is true.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* The second comparison tests that op2a is false.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4885 /* Try to simplify the OR of two comparisons, specified by
4886 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4887 If this can be simplified to a single expression (without requiring
4888 introducing more SSA variables to hold intermediate values),
4889 return the resulting tree. Otherwise return NULL_TREE.
4890 If the result expression is non-null, it has boolean type. */
4892 tree
4893 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4894 enum tree_code code2, tree op2a, tree op2b)
4896 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4897 if (t)
4898 return t;
4899 else
4900 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize)
      && rcode.is_tree_code ()
      && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
	  || ((tree_code) rcode) == ADDR_EXPR)
      && is_gimple_val (ops[0]))
    {
      /* The match-and-simplify machinery produced a gimple value;
	 use it directly.  */
      tree res = ops[0];
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Match-and-simplified ");
	  print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, res, 0);
	  fprintf (dump_file, "\n");
	}
      return res;
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  /* Fold a fully-initialized vector CONSTRUCTOR whose
		     elements all valueize to constants.  */
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      /* Replace the SSA base of the MEM_REF with its known
			 invariant address before folding the reference.  */
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    /* Only the UBSAN overflow-checking internal functions are
	       foldable here; they behave like the corresponding
	       arithmetic when no overflow occurs.  */
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
5194 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5195 Returns NULL_TREE if folding to a constant is not possible, otherwise
5196 returns a constant according to is_gimple_min_invariant. */
5198 tree
5199 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
5201 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5202 if (res && is_gimple_min_invariant (res))
5203 return res;
5204 return NULL_TREE;
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant offset into BIT_OFFSET and strip
	 the dereference, leaving the underlying decl as BASE.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only constant-sized, non-variable accesses can be resolved.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  FROM_DECL, if non-NULL, is the decl the
   constructor comes from (used when canonicalizing the returned value).
   Returns the folded value, a zero constant if the memory is not explicitly
   initialized, or NULL_TREE when the reference cannot be resolved.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  The elt_size == 0 check also
     guards the modulo/division below against division by zero.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  /* Wrap the index into the precision/signedness of the domain so it
     compares correctly against explicit constructor indexes.  */
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* Start one below LOW_BOUND so the "next index" rule in the loop below
     yields LOW_BOUND for the first element with a NULL index.  */
  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      /* A RANGE_EXPR covers [low, high] inclusive.  */
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   FROM_DECL, if non-NULL, is the decl the constructor comes from.
   Returns the folded value, a zero constant for memory not explicitly
   initialized, or NULL_TREE when the access cannot be resolved (e.g. it
   spans multiple fields).  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  A flexible array member
	 (NULL field_size) extends to the end of the object; use 0 as a
	 sentinel and test field_size below instead.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  /* Access starting before the field likewise spans fields.  */
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  /* Recurse into the field's own value with the offset rebased
	     to the start of the field.  */
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory, fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  FROM_DECL, if non-NULL, is the decl
   the constructor comes from.  Returns the folded value or NULL_TREE when
   the reference cannot be resolved.

   Folding strategies are tried in order: exact-match return of the whole
   CTOR, a VIEW_CONVERT_EXPR of a same-sized scalar, byte-level re-encoding
   of a constant, and finally recursion into CONSTRUCTOR elements.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_NOPS (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Arrays and vectors are indexed; other aggregates walk fields.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  /* Volatile accesses must not be folded away.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  /* Bare decls are handled by the symbol-value machinery.  */
  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  /* Reads from string literals fold directly.  */
  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Index relative to the array's low bound, sign-extended to
		 the index type's precision.  */
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
5615 tree
5616 fold_const_aggregate_ref (tree t)
5618 return fold_const_aggregate_ref_1 (t, NULL);
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  /* Be optimistic; failure paths below clear this again.  */
  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Only LTO streaming can legitimately lose the vtable decl.  */
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  /* Convert OFFSET+TOKEN into a bit offset of the desired slot.  */
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      /* Return the decl anyway so the caller knows the target,
		 even though it must not reference it.  */
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
5733 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5734 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5735 KNOWN_BINFO carries the binfo describing the true type of
5736 OBJ_TYPE_REF_OBJECT(REF).
5737 Set CAN_REFER if non-NULL to false if method
5738 is not referable or if the virtual table is ill-formed (such as rewriten
5739 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5741 tree
5742 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5743 bool *can_refer)
5745 unsigned HOST_WIDE_INT offset;
5746 tree v;
5748 v = BINFO_VTABLE (known_binfo);
5749 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5750 if (!v)
5751 return NULL_TREE;
5753 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5755 if (can_refer)
5756 *can_refer = false;
5757 return NULL_TREE;
5759 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  Conservative:
   a false return only means "not provably non-negative".  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	  /* Fall through to the default (operands differ, so we cannot
	     conclude anything).  */
	default:
	  return false;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    /* Builtins whose result is non-negative for any input.  */
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (val))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      /* Verify the exponent round-trips, i.e. it really
			 is an integer-valued real.  */
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, SIGNED);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      return false;
	    }
	}
    }

  return false;
}
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  /* Only fold when the low bound is a known constant.  */
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold accesses that stay within the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  /* The offset must be exactly the size of the real part, i.e.
	     the access names the imaginary part.  */
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Try to simplify the inner dereference recursively; fall back to
	 an explicit INDIRECT_REF of the original operand.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
6013 /* Return true if CODE is an operation that when operating on signed
6014 integer types involves undefined behavior on overflow and the
6015 operation can be expressed with unsigned arithmetic. */
6017 bool
6018 arith_code_with_undefined_signed_overflow (tree_code code)
6020 switch (code)
6022 case PLUS_EXPR:
6023 case MINUS_EXPR:
6024 case MULT_EXPR:
6025 case NEGATE_EXPR:
6026 case POINTER_PLUS_EXPR:
6027 return true;
6028 default:
6029 return false;
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand (ops 1..n; op 0 is the lhs) to the unsigned
     type, collecting any conversion statements that produces.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  /* Give STMT a fresh unsigned lhs; the original LHS is assigned by the
     conversion statement appended below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR has no meaning on the unsigned integer type;
     it becomes a plain addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
6074 /* Build the expression CODE OP0 of type TYPE with location LOC,
6075 simplifying it first if possible using VALUEIZE if not NULL.
6076 OP0 is expected to be valueized already. Returns the built
6077 expression value and appends statements possibly defining it
6078 to SEQ. */
6080 tree
6081 gimple_build (gimple_seq *seq, location_t loc,
6082 enum tree_code code, tree type, tree op0,
6083 tree (*valueize)(tree))
6085 tree res = gimple_simplify (code, type, op0, seq, valueize);
6086 if (!res)
6088 if (gimple_in_ssa_p (cfun))
6089 res = make_ssa_name (type);
6090 else
6091 res = create_tmp_reg (type);
6092 gimple stmt;
6093 if (code == REALPART_EXPR
6094 || code == IMAGPART_EXPR
6095 || code == VIEW_CONVERT_EXPR)
6096 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6097 else
6098 stmt = gimple_build_assign (res, code, op0);
6099 gimple_set_location (stmt, loc);
6100 gimple_seq_add_stmt_without_update (seq, stmt);
6102 return res;
6105 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6106 simplifying it first if possible using VALUEIZE if not NULL.
6107 OP0 and OP1 are expected to be valueized already. Returns the built
6108 expression value and appends statements possibly defining it
6109 to SEQ. */
6111 tree
6112 gimple_build (gimple_seq *seq, location_t loc,
6113 enum tree_code code, tree type, tree op0, tree op1,
6114 tree (*valueize)(tree))
6116 tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
6117 if (!res)
6119 if (gimple_in_ssa_p (cfun))
6120 res = make_ssa_name (type);
6121 else
6122 res = create_tmp_reg (type);
6123 gimple stmt = gimple_build_assign (res, code, op0, op1);
6124 gimple_set_location (stmt, loc);
6125 gimple_seq_add_stmt_without_update (seq, stmt);
6127 return res;
6130 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6131 simplifying it first if possible using VALUEIZE if not NULL.
6132 OP0, OP1 and OP2 are expected to be valueized already. Returns the built
6133 expression value and appends statements possibly defining it
6134 to SEQ. */
6136 tree
6137 gimple_build (gimple_seq *seq, location_t loc,
6138 enum tree_code code, tree type, tree op0, tree op1, tree op2,
6139 tree (*valueize)(tree))
6141 tree res = gimple_simplify (code, type, op0, op1, op2,
6142 seq, valueize);
6143 if (!res)
6145 if (gimple_in_ssa_p (cfun))
6146 res = make_ssa_name (type);
6147 else
6148 res = create_tmp_reg (type);
6149 gimple stmt;
6150 if (code == BIT_FIELD_REF)
6151 stmt = gimple_build_assign (res, code,
6152 build3 (code, type, op0, op1, op2));
6153 else
6154 stmt = gimple_build_assign (res, code, op0, op1, op2);
6155 gimple_set_location (stmt, loc);
6156 gimple_seq_add_stmt_without_update (seq, stmt);
6158 return res;
6161 /* Build the call FN (ARG0) with a result of type TYPE
6162 (or no result if TYPE is void) with location LOC,
6163 simplifying it first if possible using VALUEIZE if not NULL.
6164 ARG0 is expected to be valueized already. Returns the built
6165 expression value (or NULL_TREE if TYPE is void) and appends
6166 statements possibly defining it to SEQ. */
6168 tree
6169 gimple_build (gimple_seq *seq, location_t loc,
6170 enum built_in_function fn, tree type, tree arg0,
6171 tree (*valueize)(tree))
6173 tree res = gimple_simplify (fn, type, arg0, seq, valueize);
6174 if (!res)
6176 tree decl = builtin_decl_implicit (fn);
6177 gimple stmt = gimple_build_call (decl, 1, arg0);
6178 if (!VOID_TYPE_P (type))
6180 if (gimple_in_ssa_p (cfun))
6181 res = make_ssa_name (type);
6182 else
6183 res = create_tmp_reg (type);
6184 gimple_call_set_lhs (stmt, res);
6186 gimple_set_location (stmt, loc);
6187 gimple_seq_add_stmt_without_update (seq, stmt);
6189 return res;
6192 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6193 (or no result if TYPE is void) with location LOC,
6194 simplifying it first if possible using VALUEIZE if not NULL.
6195 ARG0 is expected to be valueized already. Returns the built
6196 expression value (or NULL_TREE if TYPE is void) and appends
6197 statements possibly defining it to SEQ. */
6199 tree
6200 gimple_build (gimple_seq *seq, location_t loc,
6201 enum built_in_function fn, tree type, tree arg0, tree arg1,
6202 tree (*valueize)(tree))
6204 tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
6205 if (!res)
6207 tree decl = builtin_decl_implicit (fn);
6208 gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
6209 if (!VOID_TYPE_P (type))
6211 if (gimple_in_ssa_p (cfun))
6212 res = make_ssa_name (type);
6213 else
6214 res = create_tmp_reg (type);
6215 gimple_call_set_lhs (stmt, res);
6217 gimple_set_location (stmt, loc);
6218 gimple_seq_add_stmt_without_update (seq, stmt);
6220 return res;
6223 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6224 (or no result if TYPE is void) with location LOC,
6225 simplifying it first if possible using VALUEIZE if not NULL.
6226 ARG0 is expected to be valueized already. Returns the built
6227 expression value (or NULL_TREE if TYPE is void) and appends
6228 statements possibly defining it to SEQ. */
6230 tree
6231 gimple_build (gimple_seq *seq, location_t loc,
6232 enum built_in_function fn, tree type,
6233 tree arg0, tree arg1, tree arg2,
6234 tree (*valueize)(tree))
6236 tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
6237 if (!res)
6239 tree decl = builtin_decl_implicit (fn);
6240 gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6241 if (!VOID_TYPE_P (type))
6243 if (gimple_in_ssa_p (cfun))
6244 res = make_ssa_name (type);
6245 else
6246 res = create_tmp_reg (type);
6247 gimple_call_set_lhs (stmt, res);
6249 gimple_set_location (stmt, loc);
6250 gimple_seq_add_stmt_without_update (seq, stmt);
6252 return res;
6255 /* Build the conversion (TYPE) OP with a result of type TYPE
6256 with location LOC if such conversion is neccesary in GIMPLE,
6257 simplifying it first.
6258 Returns the built expression value and appends
6259 statements possibly defining it to SEQ. */
6261 tree
6262 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6264 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6265 return op;
6266 return gimple_build (seq, loc, NOP_EXPR, type, op);