1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "stringpool.h"
27 #include "expr.h"
28 #include "stmt.h"
29 #include "stor-layout.h"
30 #include "flags.h"
31 #include "hashtab.h"
32 #include "hash-set.h"
33 #include "vec.h"
34 #include "machmode.h"
35 #include "hard-reg-set.h"
36 #include "input.h"
37 #include "function.h"
38 #include "dumpfile.h"
39 #include "bitmap.h"
40 #include "predict.h"
41 #include "dominance.h"
42 #include "basic-block.h"
43 #include "tree-ssa-alias.h"
44 #include "internal-fn.h"
45 #include "gimple-fold.h"
46 #include "gimple-expr.h"
47 #include "is-a.h"
48 #include "gimple.h"
49 #include "gimplify.h"
50 #include "gimple-iterator.h"
51 #include "gimple-ssa.h"
52 #include "tree-ssanames.h"
53 #include "tree-into-ssa.h"
54 #include "tree-dfa.h"
55 #include "tree-ssa.h"
56 #include "tree-ssa-propagate.h"
57 #include "target.h"
58 #include "hash-map.h"
59 #include "plugin-api.h"
60 #include "ipa-ref.h"
61 #include "cgraph.h"
62 #include "ipa-utils.h"
63 #include "gimple-pretty-print.h"
64 #include "tree-ssa-address.h"
65 #include "langhooks.h"
66 #include "gimplify-me.h"
67 #include "dbgcnt.h"
68 #include "builtins.h"
69 #include "output.h"
70 #include "tree-eh.h"
71 #include "gimple-match.h"
72 #include "tree-phinodes.h"
73 #include "ssa-iterators.h"
75 /* Return true when DECL can be referenced from current unit.
76    FROM_DECL (if non-null) specifies the variable from whose constructor DECL was taken.
77    We can get declarations that cannot be referenced for various
78    reasons:
80 1) When analyzing C++ virtual tables.
81 C++ virtual tables do have known constructors even
82         when they are keyed to another compilation unit.
83 Those tables can contain pointers to methods and vars
84 in other units. Those methods have both STATIC and EXTERNAL
85 set.
86       2) In WHOPR mode devirtualization might lead to a reference
87          to a method that was partitioned elsewhere.
88 In this case we have static VAR_DECL or FUNCTION_DECL
89 that has no corresponding callgraph/varpool node
90 declaring the body.
91       3) COMDAT functions referred to by external vtables that
92          we devirtualize only during the final compilation stage.
93          At this time we have already decided that we will not output
94          the function body and thus we cannot reference the symbol
95 directly. */
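/* Illustrative sketch (not from the original comment): if this unit has
   devirtualized a call through an external vtable to a COMDAT method whose
   body we already decided not to emit here, the predicate below answers
   false and the folding code must keep the indirect call.  */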
97 static bool
98 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
100 varpool_node *vnode;
101 struct cgraph_node *node;
102 symtab_node *snode;
104 if (DECL_ABSTRACT_P (decl))
105 return false;
107 /* We are concerned only about static/external vars and functions. */
108 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
109 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
110 return true;
112   /* Static objects can be referred to only if they have not been optimized out yet.  */
113 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
115 /* Before we start optimizing unreachable code we can be sure all
116 static objects are defined. */
117 if (symtab->function_flags_ready)
118 return true;
119 snode = symtab_node::get (decl);
120 if (!snode || !snode->definition)
121 return false;
122 node = dyn_cast <cgraph_node *> (snode);
123 return !node || !node->global.inlined_to;
126 /* We will later output the initializer, so we can refer to it.
127      So we are concerned only when DECL comes from the initializer of
128      an external var or a var that has been optimized out.  */
129 if (!from_decl
130 || TREE_CODE (from_decl) != VAR_DECL
131 || (!DECL_EXTERNAL (from_decl)
132 && (vnode = varpool_node::get (from_decl)) != NULL
133 && vnode->definition)
134 || (flag_ltrans
135 && (vnode = varpool_node::get (from_decl)) != NULL
136 && vnode->in_other_partition))
137 return true;
138   /* We are folding a reference from an external vtable.  The vtable may refer
139      to a symbol keyed to another compilation unit.  The other compilation
140      unit may be in a separate DSO and the symbol may be hidden.  */
141 if (DECL_VISIBILITY_SPECIFIED (decl)
142 && DECL_EXTERNAL (decl)
143 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
144 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
145 return false;
146   /* When the function is public, we can always introduce a new reference.
147      The exception is COMDAT functions, where introducing a direct
148      reference implies the need to include the function body in the current unit.  */
149 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
150 return true;
151 /* We have COMDAT. We are going to check if we still have definition
152 or if the definition is going to be output in other partition.
153 Bypass this when gimplifying; all needed functions will be produced.
155      As observed in PR20991 for already optimized out comdat virtual functions,
156      it may be tempting not to give up, because the copy will be
157      output elsewhere when the corresponding vtable is output.
158      This is however not possible - the ABI specifies that COMDATs are output
159      in the units where they are used, and when the other unit was compiled
160      with LTO it is possible that the vtable was kept public while the
161      function itself was privatized.  */
162 if (!symtab->function_flags_ready)
163 return true;
165 snode = symtab_node::get (decl);
166 if (!snode
167 || ((!snode->definition || DECL_EXTERNAL (decl))
168 && (!snode->in_other_partition
169 || (!snode->forced_by_abi && !snode->force_output))))
170 return false;
171 node = dyn_cast <cgraph_node *> (snode);
172 return !node || !node->global.inlined_to;
175 /* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to transform
176    it into an acceptable form for is_gimple_min_invariant.
177    FROM_DECL (if non-NULL) specifies the variable whose constructor contains CVAL.  */
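/* For illustration (a sketch, not part of the original comment): an
   initializer value such as "&a + 4", i.e. a POINTER_PLUS_EXPR of an
   ADDR_EXPR and an INTEGER_CST, is rewritten below as the address of a
   MEM_REF, "&MEM[&a, 4]", a form is_gimple_min_invariant accepts.  */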
179 tree
180 canonicalize_constructor_val (tree cval, tree from_decl)
182 tree orig_cval = cval;
183 STRIP_NOPS (cval);
184 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
185 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
187 tree ptr = TREE_OPERAND (cval, 0);
188 if (is_gimple_min_invariant (ptr))
189 cval = build1_loc (EXPR_LOCATION (cval),
190 ADDR_EXPR, TREE_TYPE (ptr),
191 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
192 ptr,
193 fold_convert (ptr_type_node,
194 TREE_OPERAND (cval, 1))));
196 if (TREE_CODE (cval) == ADDR_EXPR)
198 tree base = NULL_TREE;
199 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
201 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
202 if (base)
203 TREE_OPERAND (cval, 0) = base;
205 else
206 base = get_base_address (TREE_OPERAND (cval, 0));
207 if (!base)
208 return NULL_TREE;
210 if ((TREE_CODE (base) == VAR_DECL
211 || TREE_CODE (base) == FUNCTION_DECL)
212 && !can_refer_decl_in_current_unit_p (base, from_decl))
213 return NULL_TREE;
214 if (TREE_CODE (base) == VAR_DECL)
215 TREE_ADDRESSABLE (base) = 1;
216 else if (TREE_CODE (base) == FUNCTION_DECL)
218 /* Make sure we create a cgraph node for functions we'll reference.
219 They can be non-existent if the reference comes from an entry
220 of an external vtable for example. */
221 cgraph_node::get_create (base);
223 /* Fixup types in global initializers. */
224 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
225 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
227 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
228 cval = fold_convert (TREE_TYPE (orig_cval), cval);
229 return cval;
231 if (TREE_OVERFLOW_P (cval))
232 return drop_tree_overflow (cval);
233 return orig_cval;
236 /* If SYM is a constant variable with known value, return the value.
237 NULL_TREE is returned otherwise. */
239 tree
240 get_symbol_constant_value (tree sym)
242 tree val = ctor_for_folding (sym);
243 if (val != error_mark_node)
245 if (val)
247 val = canonicalize_constructor_val (unshare_expr (val), sym);
248 if (val && is_gimple_min_invariant (val))
249 return val;
250 else
251 return NULL_TREE;
253 /* Variables declared 'const' without an initializer
254 have zero as the initializer if they may not be
255 overridden at link or run time. */
256 if (!val
257 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
258 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
259 return build_zero_cst (TREE_TYPE (sym));
262 return NULL_TREE;
267 /* Subroutine of fold_stmt. We perform several simplifications of the
268 memory reference tree EXPR and make sure to re-gimplify them properly
269 after propagation of constant addresses. IS_LHS is true if the
270 reference is supposed to be an lvalue. */
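/* For example (illustrative only): IMAGPART_EXPR of the constant
   __complex__ (1.0, 2.0) folds to 2.0, a BIT_FIELD_REF of a VECTOR_CST
   folds to the selected element, and a load from a constant aggregate
   such as "cst_array[2]" folds via fold_const_aggregate_ref.  */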
272 static tree
273 maybe_fold_reference (tree expr, bool is_lhs)
275 tree result;
277 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
278 || TREE_CODE (expr) == REALPART_EXPR
279 || TREE_CODE (expr) == IMAGPART_EXPR)
280 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
281 return fold_unary_loc (EXPR_LOCATION (expr),
282 TREE_CODE (expr),
283 TREE_TYPE (expr),
284 TREE_OPERAND (expr, 0));
285 else if (TREE_CODE (expr) == BIT_FIELD_REF
286 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
287 return fold_ternary_loc (EXPR_LOCATION (expr),
288 TREE_CODE (expr),
289 TREE_TYPE (expr),
290 TREE_OPERAND (expr, 0),
291 TREE_OPERAND (expr, 1),
292 TREE_OPERAND (expr, 2));
294 if (!is_lhs
295 && (result = fold_const_aggregate_ref (expr))
296 && is_gimple_min_invariant (result))
297 return result;
299 return NULL_TREE;
303 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
304 replacement rhs for the statement or NULL_TREE if no simplification
305 could be made. It is assumed that the operands have been previously
306 folded. */
308 static tree
309 fold_gimple_assign (gimple_stmt_iterator *si)
311 gimple stmt = gsi_stmt (*si);
312 enum tree_code subcode = gimple_assign_rhs_code (stmt);
313 location_t loc = gimple_location (stmt);
315 tree result = NULL_TREE;
317 switch (get_gimple_rhs_class (subcode))
319 case GIMPLE_SINGLE_RHS:
321 tree rhs = gimple_assign_rhs1 (stmt);
323 if (REFERENCE_CLASS_P (rhs))
324 return maybe_fold_reference (rhs, false);
326 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
328 tree val = OBJ_TYPE_REF_EXPR (rhs);
329 if (is_gimple_min_invariant (val))
330 return val;
331 else if (flag_devirtualize && virtual_method_call_p (rhs))
333 bool final;
334 vec <cgraph_node *>targets
335 = possible_polymorphic_call_targets (rhs, stmt, &final);
336 if (final && targets.length () <= 1 && dbg_cnt (devirt))
338 if (dump_enabled_p ())
340 location_t loc = gimple_location_safe (stmt);
341 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
342 "resolving virtual function address "
343 "reference to function %s\n",
344 targets.length () == 1
345 ? targets[0]->name ()
346 : "NULL");
348 if (targets.length () == 1)
350 val = fold_convert (TREE_TYPE (val),
351 build_fold_addr_expr_loc
352 (loc, targets[0]->decl));
353 STRIP_USELESS_TYPE_CONVERSION (val);
355 else
356 		  /* We cannot use __builtin_unreachable here because it
357 		     cannot have its address taken.  */
358 val = build_int_cst (TREE_TYPE (val), 0);
359 return val;
364 else if (TREE_CODE (rhs) == ADDR_EXPR)
366 tree ref = TREE_OPERAND (rhs, 0);
367 tree tem = maybe_fold_reference (ref, true);
368 if (tem
369 && TREE_CODE (tem) == MEM_REF
370 && integer_zerop (TREE_OPERAND (tem, 1)))
371 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
372 else if (tem)
373 result = fold_convert (TREE_TYPE (rhs),
374 build_fold_addr_expr_loc (loc, tem));
375 else if (TREE_CODE (ref) == MEM_REF
376 && integer_zerop (TREE_OPERAND (ref, 1)))
377 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
380 else if (TREE_CODE (rhs) == CONSTRUCTOR
381 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
382 && (CONSTRUCTOR_NELTS (rhs)
383 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
385 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
386 unsigned i;
387 tree val;
389 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
390 if (TREE_CODE (val) != INTEGER_CST
391 && TREE_CODE (val) != REAL_CST
392 && TREE_CODE (val) != FIXED_CST)
393 return NULL_TREE;
395 return build_vector_from_ctor (TREE_TYPE (rhs),
396 CONSTRUCTOR_ELTS (rhs));
399 else if (DECL_P (rhs))
400 return get_symbol_constant_value (rhs);
402 /* If we couldn't fold the RHS, hand over to the generic
403 fold routines. */
404 if (result == NULL_TREE)
405 result = fold (rhs);
407 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
408 that may have been added by fold, and "useless" type
409 conversions that might now be apparent due to propagation. */
410 STRIP_USELESS_TYPE_CONVERSION (result);
412 if (result != rhs && valid_gimple_rhs_p (result))
413 return result;
415 return NULL_TREE;
417 break;
419 case GIMPLE_UNARY_RHS:
421 tree rhs = gimple_assign_rhs1 (stmt);
423 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
424 if (result)
426 	  /* If the operation was a conversion, do _not_ mark a
427 resulting constant with TREE_OVERFLOW if the original
428 constant was not. These conversions have implementation
429 defined behavior and retaining the TREE_OVERFLOW flag
430 here would confuse later passes such as VRP. */
431 if (CONVERT_EXPR_CODE_P (subcode)
432 && TREE_CODE (result) == INTEGER_CST
433 && TREE_CODE (rhs) == INTEGER_CST)
434 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
436 STRIP_USELESS_TYPE_CONVERSION (result);
437 if (valid_gimple_rhs_p (result))
438 return result;
441 break;
443 case GIMPLE_BINARY_RHS:
444 /* Try to canonicalize for boolean-typed X the comparisons
445 X == 0, X == 1, X != 0, and X != 1. */
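      /* For example (illustration only): for _Bool-typed X,
	 "tem = (X != 0)" simplifies to "tem = X"; "tem = (X == 0)"
	 becomes "tem = ~X" when X has one-bit precision and
	 "tem = X ^ 1" otherwise.  */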
446 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
447 || gimple_assign_rhs_code (stmt) == NE_EXPR)
449 tree lhs = gimple_assign_lhs (stmt);
450 tree op1 = gimple_assign_rhs1 (stmt);
451 tree op2 = gimple_assign_rhs2 (stmt);
452 tree type = TREE_TYPE (op1);
454 	  /* Check whether the comparison operands are of the same boolean
455 	     type as the result type.
456 	     Check that the second operand is an integer constant with value
457 	     one or zero.  */
458 if (TREE_CODE (op2) == INTEGER_CST
459 && (integer_zerop (op2) || integer_onep (op2))
460 && useless_type_conversion_p (TREE_TYPE (lhs), type))
462 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
463 bool is_logical_not = false;
465 	      /* X == 0 and X != 1 is a logical-not of X;
466 		 X == 1 and X != 0 is X itself.  */
467 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
468 || (cmp_code == NE_EXPR && integer_onep (op2)))
469 is_logical_not = true;
471 if (is_logical_not == false)
472 result = op1;
473 	    /* Only for X of one-bit precision type is the transformation
474 	       !X -> ~X valid.  */
475 else if (TYPE_PRECISION (type) == 1)
476 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
477 type, op1);
478 /* Otherwise we use !X -> X ^ 1. */
479 else
480 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
481 type, op1, build_int_cst (type, 1));
486 if (!result)
487 result = fold_binary_loc (loc, subcode,
488 TREE_TYPE (gimple_assign_lhs (stmt)),
489 gimple_assign_rhs1 (stmt),
490 gimple_assign_rhs2 (stmt));
492 if (result)
494 STRIP_USELESS_TYPE_CONVERSION (result);
495 if (valid_gimple_rhs_p (result))
496 return result;
498 break;
500 case GIMPLE_TERNARY_RHS:
501 /* Try to fold a conditional expression. */
502 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
504 tree op0 = gimple_assign_rhs1 (stmt);
505 tree tem;
506 bool set = false;
507 location_t cond_loc = gimple_location (stmt);
509 if (COMPARISON_CLASS_P (op0))
511 fold_defer_overflow_warnings ();
512 tem = fold_binary_loc (cond_loc,
513 TREE_CODE (op0), TREE_TYPE (op0),
514 TREE_OPERAND (op0, 0),
515 TREE_OPERAND (op0, 1));
516 /* This is actually a conditional expression, not a GIMPLE
517 conditional statement, however, the valid_gimple_rhs_p
518 test still applies. */
519 set = (tem && is_gimple_condexpr (tem)
520 && valid_gimple_rhs_p (tem));
521 fold_undefer_overflow_warnings (set, stmt, 0);
523 else if (is_gimple_min_invariant (op0))
525 tem = op0;
526 set = true;
528 else
529 return NULL_TREE;
531 if (set)
532 result = fold_build3_loc (cond_loc, COND_EXPR,
533 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
534 gimple_assign_rhs2 (stmt),
535 gimple_assign_rhs3 (stmt));
538 if (!result)
539 result = fold_ternary_loc (loc, subcode,
540 TREE_TYPE (gimple_assign_lhs (stmt)),
541 gimple_assign_rhs1 (stmt),
542 gimple_assign_rhs2 (stmt),
543 gimple_assign_rhs3 (stmt));
545 if (result)
547 STRIP_USELESS_TYPE_CONVERSION (result);
548 if (valid_gimple_rhs_p (result))
549 return result;
551 break;
553 case GIMPLE_INVALID_RHS:
554 gcc_unreachable ();
557 return NULL_TREE;
560 /* Attempt to fold a conditional statement. Return true if any changes were
561 made. We only attempt to fold the condition expression, and do not perform
562 any transformation that would require alteration of the cfg. It is
563 assumed that the operands have been previously folded. */
565 static bool
566 fold_gimple_cond (gimple stmt)
568 tree result = fold_binary_loc (gimple_location (stmt),
569 gimple_cond_code (stmt),
570 boolean_type_node,
571 gimple_cond_lhs (stmt),
572 gimple_cond_rhs (stmt));
574 if (result)
576 STRIP_USELESS_TYPE_CONVERSION (result);
577 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
579 gimple_cond_set_condition_from_tree (stmt, result);
580 return true;
584 return false;
588 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
589    adjusting the replacement stmts' location and virtual operands.
590 If the statement has a lhs the last stmt in the sequence is expected
591 to assign to that lhs. */
593 static void
594 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
596 gimple stmt = gsi_stmt (*si_p);
598 if (gimple_has_location (stmt))
599 annotate_all_with_location (stmts, gimple_location (stmt));
601 /* First iterate over the replacement statements backward, assigning
602 virtual operands to their defining statements. */
603 gimple laststore = NULL;
604 for (gimple_stmt_iterator i = gsi_last (stmts);
605 !gsi_end_p (i); gsi_prev (&i))
607 gimple new_stmt = gsi_stmt (i);
608 if ((gimple_assign_single_p (new_stmt)
609 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
610 || (is_gimple_call (new_stmt)
611 && (gimple_call_flags (new_stmt)
612 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
614 tree vdef;
615 if (!laststore)
616 vdef = gimple_vdef (stmt);
617 else
618 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
619 gimple_set_vdef (new_stmt, vdef);
620 if (vdef && TREE_CODE (vdef) == SSA_NAME)
621 SSA_NAME_DEF_STMT (vdef) = new_stmt;
622 laststore = new_stmt;
626 /* Second iterate over the statements forward, assigning virtual
627 operands to their uses. */
628 tree reaching_vuse = gimple_vuse (stmt);
629 for (gimple_stmt_iterator i = gsi_start (stmts);
630 !gsi_end_p (i); gsi_next (&i))
632 gimple new_stmt = gsi_stmt (i);
633       /* If the new statement possibly has a VUSE, update it with the exact
634 	 SSA name we know will reach this one.  */
635 if (gimple_has_mem_ops (new_stmt))
636 gimple_set_vuse (new_stmt, reaching_vuse);
637 gimple_set_modified (new_stmt, true);
638 if (gimple_vdef (new_stmt))
639 reaching_vuse = gimple_vdef (new_stmt);
642   /* If the new sequence does not do a store, release the virtual
643      definition of the original statement.  */
644 if (reaching_vuse
645 && reaching_vuse == gimple_vuse (stmt))
647 tree vdef = gimple_vdef (stmt);
648 if (vdef
649 && TREE_CODE (vdef) == SSA_NAME)
651 unlink_stmt_vdef (stmt);
652 release_ssa_name (vdef);
656 /* Finally replace the original statement with the sequence. */
657 gsi_replace_with_seq (si_p, stmts, false);
660 /* Convert EXPR into a GIMPLE value suitable for substitution on the
661 RHS of an assignment. Insert the necessary statements before
662 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
663    is replaced.  If the call is expected to produce a result, then it
664 is replaced by an assignment of the new RHS to the result variable.
665 If the result is to be ignored, then the call is replaced by a
666 GIMPLE_NOP. A proper VDEF chain is retained by making the first
667 VUSE and the last VDEF of the whole sequence be the same as the replaced
668 statement and using new SSA names for stores in between. */
670 void
671 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
673 tree lhs;
674 gimple stmt, new_stmt;
675 gimple_stmt_iterator i;
676 gimple_seq stmts = NULL;
678 stmt = gsi_stmt (*si_p);
680 gcc_assert (is_gimple_call (stmt));
682 push_gimplify_context (gimple_in_ssa_p (cfun));
684 lhs = gimple_call_lhs (stmt);
685 if (lhs == NULL_TREE)
687 gimplify_and_add (expr, &stmts);
688       /* We can end up folding a memcpy of an empty class assignment,
689 	 which gets optimized away by the C++ gimplifier.  */
690 if (gimple_seq_empty_p (stmts))
692 pop_gimplify_context (NULL);
693 if (gimple_in_ssa_p (cfun))
695 unlink_stmt_vdef (stmt);
696 release_defs (stmt);
698 gsi_replace (si_p, gimple_build_nop (), true);
699 return;
702 else
704 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
705 new_stmt = gimple_build_assign (lhs, tmp);
706 i = gsi_last (stmts);
707 gsi_insert_after_without_update (&i, new_stmt,
708 GSI_CONTINUE_LINKING);
711 pop_gimplify_context (NULL);
713 gsi_replace_with_seq_vops (si_p, stmts);
717 /* Replace the call at *GSI with the gimple value VAL. */
719 static void
720 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
722 gimple stmt = gsi_stmt (*gsi);
723 tree lhs = gimple_call_lhs (stmt);
724 gimple repl;
725 if (lhs)
727 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
728 val = fold_convert (TREE_TYPE (lhs), val);
729 repl = gimple_build_assign (lhs, val);
731 else
732 repl = gimple_build_nop ();
733 tree vdef = gimple_vdef (stmt);
734 if (vdef && TREE_CODE (vdef) == SSA_NAME)
736 unlink_stmt_vdef (stmt);
737 release_ssa_name (vdef);
739 gsi_replace (gsi, repl, true);
742 /* Replace the call at *GSI with the new call REPL and fold that
743 again. */
745 static void
746 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
748 gimple stmt = gsi_stmt (*gsi);
749 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
750 gimple_set_location (repl, gimple_location (stmt));
751 if (gimple_vdef (stmt)
752 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
754 gimple_set_vdef (repl, gimple_vdef (stmt));
755 gimple_set_vuse (repl, gimple_vuse (stmt));
756 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
758 gsi_replace (gsi, repl, true);
759 fold_stmt (gsi);
762 /* Return true if VAR is a VAR_DECL or a component thereof. */
764 static bool
765 var_decl_component_p (tree var)
767 tree inner = var;
768 while (handled_component_p (inner))
769 inner = TREE_OPERAND (inner, 0);
770 return SSA_VAR_P (inner);
773 /* Fold a function call to a builtin mem{{,p}cpy,move}.  Return
774    false if no simplification can be made.
775 If ENDP is 0, return DEST (like memcpy).
776 If ENDP is 1, return DEST+LEN (like mempcpy).
777 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
778 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
779 (memmove). */
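/* For example (illustrative only): for "res = mempcpy (d, s, 16)"
   (ENDP == 1) the copy itself is emitted as an aggregate assignment or
   kept as a call, and the result is rewritten to "res = d + 16"; for
   memcpy (ENDP == 0) the result is simply "res = d".  */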
781 static bool
782 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
783 tree dest, tree src, int endp)
785 gimple stmt = gsi_stmt (*gsi);
786 tree lhs = gimple_call_lhs (stmt);
787 tree len = gimple_call_arg (stmt, 2);
788 tree destvar, srcvar;
789 location_t loc = gimple_location (stmt);
791 /* If the LEN parameter is zero, return DEST. */
792 if (integer_zerop (len))
794 gimple repl;
795 if (gimple_call_lhs (stmt))
796 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
797 else
798 repl = gimple_build_nop ();
799 tree vdef = gimple_vdef (stmt);
800 if (vdef && TREE_CODE (vdef) == SSA_NAME)
802 unlink_stmt_vdef (stmt);
803 release_ssa_name (vdef);
805 gsi_replace (gsi, repl, true);
806 return true;
809 /* If SRC and DEST are the same (and not volatile), return
810 DEST{,+LEN,+LEN-1}. */
811 if (operand_equal_p (src, dest, 0))
813 unlink_stmt_vdef (stmt);
814 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
815 release_ssa_name (gimple_vdef (stmt));
816 if (!lhs)
818 gsi_replace (gsi, gimple_build_nop (), true);
819 return true;
821 goto done;
823 else
825 tree srctype, desttype;
826 unsigned int src_align, dest_align;
827 tree off0;
829 /* Build accesses at offset zero with a ref-all character type. */
830 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
831 ptr_mode, true), 0);
833       /* If we can perform the copy efficiently by first doing all loads
834 	 and then all stores, inline it that way.  Currently "efficiently"
835 	 means that we can load all the memory into a single integer
836 	 register, which is what MOVE_MAX gives us.  */
837 src_align = get_pointer_alignment (src);
838 dest_align = get_pointer_alignment (dest);
839 if (tree_fits_uhwi_p (len)
840 && compare_tree_int (len, MOVE_MAX) <= 0
841 	  /* ??? Don't transform copies from strings with known length, as this
842 	     confuses tree-ssa-strlen.c.  This doesn't handle
843 	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
844 	     reason.  */
845 && !c_strlen (src, 2))
847 unsigned ilen = tree_to_uhwi (len);
848 if (exact_log2 (ilen) != -1)
850 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
851 if (type
852 && TYPE_MODE (type) != BLKmode
853 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
854 == ilen * 8)
855 /* If the destination pointer is not aligned we must be able
856 to emit an unaligned store. */
857 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
858 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
860 tree srctype = type;
861 tree desttype = type;
862 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
863 srctype = build_aligned_type (type, src_align);
864 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
865 tree tem = fold_const_aggregate_ref (srcmem);
866 if (tem)
867 srcmem = tem;
868 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
869 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
870 src_align))
871 srcmem = NULL_TREE;
872 if (srcmem)
874 gimple new_stmt;
875 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
877 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
878 if (gimple_in_ssa_p (cfun))
879 srcmem = make_ssa_name (TREE_TYPE (srcmem),
880 new_stmt);
881 else
882 srcmem = create_tmp_reg (TREE_TYPE (srcmem),
883 NULL);
884 gimple_assign_set_lhs (new_stmt, srcmem);
885 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
886 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
888 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
889 desttype = build_aligned_type (type, dest_align);
890 new_stmt
891 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
892 dest, off0),
893 srcmem);
894 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
895 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
896 if (gimple_vdef (new_stmt)
897 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
898 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
899 if (!lhs)
901 gsi_replace (gsi, new_stmt, true);
902 return true;
904 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
905 goto done;
911 if (endp == 3)
913 /* Both DEST and SRC must be pointer types.
914 	 ??? This is what the old code did.  Is the testing for pointer types
915 	 really mandatory?
917 	 If either SRC is readonly or the length is 1, we can use memcpy.  */
918 if (!dest_align || !src_align)
919 return false;
920 if (readonly_data_expr (src)
921 || (tree_fits_uhwi_p (len)
922 && (MIN (src_align, dest_align) / BITS_PER_UNIT
923 >= tree_to_uhwi (len))))
925 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
926 if (!fn)
927 return false;
928 gimple_call_set_fndecl (stmt, fn);
929 gimple_call_set_arg (stmt, 0, dest);
930 gimple_call_set_arg (stmt, 1, src);
931 fold_stmt (gsi);
932 return true;
935 /* If *src and *dest can't overlap, optimize into memcpy as well. */
936 if (TREE_CODE (src) == ADDR_EXPR
937 && TREE_CODE (dest) == ADDR_EXPR)
939 tree src_base, dest_base, fn;
940 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
941 HOST_WIDE_INT size = -1;
942 HOST_WIDE_INT maxsize = -1;
944 srcvar = TREE_OPERAND (src, 0);
945 src_base = get_ref_base_and_extent (srcvar, &src_offset,
946 &size, &maxsize);
947 destvar = TREE_OPERAND (dest, 0);
948 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
949 &size, &maxsize);
950 if (tree_fits_uhwi_p (len))
951 maxsize = tree_to_uhwi (len);
952 else
953 maxsize = -1;
954 src_offset /= BITS_PER_UNIT;
955 dest_offset /= BITS_PER_UNIT;
956 if (SSA_VAR_P (src_base)
957 && SSA_VAR_P (dest_base))
959 if (operand_equal_p (src_base, dest_base, 0)
960 && ranges_overlap_p (src_offset, maxsize,
961 dest_offset, maxsize))
962 return false;
964 else if (TREE_CODE (src_base) == MEM_REF
965 && TREE_CODE (dest_base) == MEM_REF)
967 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
968 TREE_OPERAND (dest_base, 0), 0))
969 return false;
970 offset_int off = mem_ref_offset (src_base) + src_offset;
971 if (!wi::fits_shwi_p (off))
972 return false;
973 src_offset = off.to_shwi ();
975 off = mem_ref_offset (dest_base) + dest_offset;
976 if (!wi::fits_shwi_p (off))
977 return false;
978 dest_offset = off.to_shwi ();
979 if (ranges_overlap_p (src_offset, maxsize,
980 dest_offset, maxsize))
981 return false;
983 else
984 return false;
986 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
987 if (!fn)
988 return false;
989 gimple_call_set_fndecl (stmt, fn);
990 gimple_call_set_arg (stmt, 0, dest);
991 gimple_call_set_arg (stmt, 1, src);
992 fold_stmt (gsi);
993 return true;
996 /* If the destination and source do not alias optimize into
997 memcpy as well. */
998 if ((is_gimple_min_invariant (dest)
999 || TREE_CODE (dest) == SSA_NAME)
1000 && (is_gimple_min_invariant (src)
1001 || TREE_CODE (src) == SSA_NAME))
1003 ao_ref destr, srcr;
1004 ao_ref_init_from_ptr_and_size (&destr, dest, len);
1005 ao_ref_init_from_ptr_and_size (&srcr, src, len);
1006 if (!refs_may_alias_p_1 (&destr, &srcr, false))
1008 tree fn;
1009 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1010 if (!fn)
1011 return false;
1012 gimple_call_set_fndecl (stmt, fn);
1013 gimple_call_set_arg (stmt, 0, dest);
1014 gimple_call_set_arg (stmt, 1, src);
1015 fold_stmt (gsi);
1016 return true;
1020 return false;
1023 if (!tree_fits_shwi_p (len))
1024 return false;
1025   /* FIXME:
1026      This logic loses for arguments like (type *)malloc (sizeof (type)),
1027      since we strip the casts from the VOID * return value of malloc.
1028      Perhaps we ought to inherit the type from a non-VOID argument here?  */
1029 STRIP_NOPS (src);
1030 STRIP_NOPS (dest);
1031 if (!POINTER_TYPE_P (TREE_TYPE (src))
1032 || !POINTER_TYPE_P (TREE_TYPE (dest)))
1033 return false;
1034 /* In the following try to find a type that is most natural to be
1035 used for the memcpy source and destination and that allows
1036 the most optimization when memcpy is turned into a plain assignment
1037 using that type. In theory we could always use a char[len] type
1038      but that would only gain us that the destination and source possibly
1039      will no longer have their address taken.  */
1040   /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here.  */
1041 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1043 tree tem = TREE_OPERAND (src, 0);
1044 STRIP_NOPS (tem);
1045 if (tem != TREE_OPERAND (src, 0))
1046 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1048 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1050 tree tem = TREE_OPERAND (dest, 0);
1051 STRIP_NOPS (tem);
1052 if (tem != TREE_OPERAND (dest, 0))
1053 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1055 srctype = TREE_TYPE (TREE_TYPE (src));
1056 if (TREE_CODE (srctype) == ARRAY_TYPE
1057 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1059 srctype = TREE_TYPE (srctype);
1060 STRIP_NOPS (src);
1061 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1063 desttype = TREE_TYPE (TREE_TYPE (dest));
1064 if (TREE_CODE (desttype) == ARRAY_TYPE
1065 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1067 desttype = TREE_TYPE (desttype);
1068 STRIP_NOPS (dest);
1069 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1071 if (TREE_ADDRESSABLE (srctype)
1072 || TREE_ADDRESSABLE (desttype))
1073 return false;
1075 /* Make sure we are not copying using a floating-point mode or
1076 a type whose size possibly does not match its precision. */
1077 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1078 || TREE_CODE (desttype) == BOOLEAN_TYPE
1079 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1080 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1081 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1082 || TREE_CODE (srctype) == BOOLEAN_TYPE
1083 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1084 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1085 if (!srctype)
1086 srctype = desttype;
1087 if (!desttype)
1088 desttype = srctype;
1089 if (!srctype)
1090 return false;
1092 src_align = get_pointer_alignment (src);
1093 dest_align = get_pointer_alignment (dest);
1094 if (dest_align < TYPE_ALIGN (desttype)
1095 || src_align < TYPE_ALIGN (srctype))
1096 return false;
1098 destvar = dest;
1099 STRIP_NOPS (destvar);
1100 if (TREE_CODE (destvar) == ADDR_EXPR
1101 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1102 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1103 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1104 else
1105 destvar = NULL_TREE;
1107 srcvar = src;
1108 STRIP_NOPS (srcvar);
1109 if (TREE_CODE (srcvar) == ADDR_EXPR
1110 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1111 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1113 if (!destvar
1114 || src_align >= TYPE_ALIGN (desttype))
1115 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1116 srcvar, off0);
1117 else if (!STRICT_ALIGNMENT)
1119 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1120 src_align);
1121 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1123 else
1124 srcvar = NULL_TREE;
1126 else
1127 srcvar = NULL_TREE;
1129 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1130 return false;
1132 if (srcvar == NULL_TREE)
1134 STRIP_NOPS (src);
1135 if (src_align >= TYPE_ALIGN (desttype))
1136 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1137 else
1139 if (STRICT_ALIGNMENT)
1140 return false;
1141 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1142 src_align);
1143 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1146 else if (destvar == NULL_TREE)
1148 STRIP_NOPS (dest);
1149 if (dest_align >= TYPE_ALIGN (srctype))
1150 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1151 else
1153 if (STRICT_ALIGNMENT)
1154 return false;
1155 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1156 dest_align);
1157 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1161 gimple new_stmt;
1162 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1164 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1165 if (gimple_in_ssa_p (cfun))
1166 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1167 else
1168 srcvar = create_tmp_reg (TREE_TYPE (srcvar), NULL);
1169 gimple_assign_set_lhs (new_stmt, srcvar);
1170 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1171 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1173 new_stmt = gimple_build_assign (destvar, srcvar);
1174 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1175 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1176 if (gimple_vdef (new_stmt)
1177 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1178 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1179 if (!lhs)
1181 gsi_replace (gsi, new_stmt, true);
1182 return true;
1184 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1187 done:
1188 if (endp == 0 || endp == 3)
1189 len = NULL_TREE;
1190 else if (endp == 2)
1191 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1192 ssize_int (1));
1193 if (endp == 2 || endp == 1)
1194 dest = fold_build_pointer_plus_loc (loc, dest, len);
1196 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1197 GSI_SAME_STMT);
1198 gimple repl = gimple_build_assign (lhs, dest);
1199 gsi_replace (gsi, repl, true);
1200 return true;
1203 /* Fold a function call to builtin memset or bzero at *GSI, setting the
1204    memory of size LEN to the value C.  Return whether a simplification
1205    was made.  */
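/* Illustrative example (assuming a 4-byte int): "memset (&i, 0xab, 4)"
   can be folded by the code below into the single store
   "i = 0xabababab;".  */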
1206 static bool
1207 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1209 gimple stmt = gsi_stmt (*gsi);
1210 tree etype;
1211 unsigned HOST_WIDE_INT length, cval;
1213 /* If the LEN parameter is zero, return DEST. */
1214 if (integer_zerop (len))
1216 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1217 return true;
1220 if (! tree_fits_uhwi_p (len))
1221 return false;
1223 if (TREE_CODE (c) != INTEGER_CST)
1224 return false;
1226 tree dest = gimple_call_arg (stmt, 0);
1227 tree var = dest;
1228 if (TREE_CODE (var) != ADDR_EXPR)
1229 return false;
1231 var = TREE_OPERAND (var, 0);
1232 if (TREE_THIS_VOLATILE (var))
1233 return false;
1235 etype = TREE_TYPE (var);
1236 if (TREE_CODE (etype) == ARRAY_TYPE)
1237 etype = TREE_TYPE (etype);
1239 if (!INTEGRAL_TYPE_P (etype)
1240 && !POINTER_TYPE_P (etype))
1241     return false;
1243 if (! var_decl_component_p (var))
1244     return false;
1246 length = tree_to_uhwi (len);
1247 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1248 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1249     return false;
1251 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1252     return false;
1254 if (integer_zerop (c))
1255 cval = 0;
1256 else
1258 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1259 	return false;
1261 cval = TREE_INT_CST_LOW (c);
1262 cval &= 0xff;
1263 cval |= cval << 8;
1264 cval |= cval << 16;
1265 cval |= (cval << 31) << 1;
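      /* The shifts above replicate the low byte of C across CVAL, e.g.
	 0xab becomes 0xabab...ab.  The final shift by 32 is split as
	 (<< 31) << 1, presumably so the shift amount never equals the
	 width of a 32-bit HOST_WIDE_INT, which would be undefined.  */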
1268 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1269 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1270 gimple_set_vuse (store, gimple_vuse (stmt));
1271 tree vdef = gimple_vdef (stmt);
1272 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1274 gimple_set_vdef (store, gimple_vdef (stmt));
1275 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1277 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1278 if (gimple_call_lhs (stmt))
1280 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1281 gsi_replace (gsi, asgn, true);
1283 else
1285 gimple_stmt_iterator gsi2 = *gsi;
1286 gsi_prev (gsi);
1287 gsi_remove (&gsi2, true);
1290 return true;
1294 /* Return the string length, maximum string length or maximum value of
1295 ARG in LENGTH.
1296    If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
1297    is not NULL and, for TYPE == 0, its value is not equal to the length
1298    we determine, or if we are unable to determine the length or value,
1299    return false.  VISITED is a bitmap of visited variables.
1300 TYPE is 0 if string length should be returned, 1 for maximum string
1301 length and 2 for maximum value ARG can have. */
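/* For example (illustrative only): for ARG pointing to the literal
   "abc", TYPE == 0 yields LENGTH == 3; TYPE == 1 yields the maximum
   string length over all reaching definitions; TYPE == 2 yields the
   maximum integer value ARG itself can have, as used for the length
   arguments of the _chk builtins.  */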
1303 static bool
1304 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1306 tree var, val;
1307 gimple def_stmt;
1309 if (TREE_CODE (arg) != SSA_NAME)
1311 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1312 if (TREE_CODE (arg) == ADDR_EXPR
1313 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1314 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1316 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1317 if (TREE_CODE (aop0) == INDIRECT_REF
1318 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1319 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1320 length, visited, type);
1323 if (type == 2)
1325 val = arg;
1326 if (TREE_CODE (val) != INTEGER_CST
1327 || tree_int_cst_sgn (val) < 0)
1328 return false;
1330 else
1331 val = c_strlen (arg, 1);
1332 if (!val)
1333 return false;
1335 if (*length)
1337 if (type > 0)
1339 if (TREE_CODE (*length) != INTEGER_CST
1340 || TREE_CODE (val) != INTEGER_CST)
1341 return false;
1343 if (tree_int_cst_lt (*length, val))
1344 *length = val;
1345 return true;
1347 else if (simple_cst_equal (val, *length) != 1)
1348 return false;
1351 *length = val;
1352 return true;
1355 /* If ARG is registered for SSA update we cannot look at its defining
1356 statement. */
1357 if (name_registered_for_update_p (arg))
1358 return false;
1360 /* If we were already here, break the infinite cycle. */
1361 if (!*visited)
1362 *visited = BITMAP_ALLOC (NULL);
1363 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1364 return true;
1366 var = arg;
1367 def_stmt = SSA_NAME_DEF_STMT (var);
1369 switch (gimple_code (def_stmt))
1371 case GIMPLE_ASSIGN:
1372 /* The RHS of the statement defining VAR must either have a
1373 constant length or come from another SSA_NAME with a constant
1374 length. */
1375 if (gimple_assign_single_p (def_stmt)
1376 || gimple_assign_unary_nop_p (def_stmt))
1378 tree rhs = gimple_assign_rhs1 (def_stmt);
1379 return get_maxval_strlen (rhs, length, visited, type);
1381 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1383 tree op2 = gimple_assign_rhs2 (def_stmt);
1384 tree op3 = gimple_assign_rhs3 (def_stmt);
1385 return get_maxval_strlen (op2, length, visited, type)
1386 && get_maxval_strlen (op3, length, visited, type);
1388 return false;
1390 case GIMPLE_PHI:
1392 /* All the arguments of the PHI node must have the same constant
1393 length. */
1394 unsigned i;
1396 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1398 tree arg = gimple_phi_arg (def_stmt, i)->def;
1400 /* If this PHI has itself as an argument, we cannot
1401 determine the string length of this argument. However,
1402 if we can find a constant string length for the other
1403 PHI args then we can still be sure that this is a
1404 constant string length. So be optimistic and just
1405 continue with the next argument. */
1406 if (arg == gimple_phi_result (def_stmt))
1407 continue;
1409 if (!get_maxval_strlen (arg, length, visited, type))
1410 return false;
1413 return true;
1415 default:
1416 return false;
1420 tree
1421 get_maxval_strlen (tree arg, int type)
1423 bitmap visited = NULL;
1424 tree len = NULL_TREE;
1425 if (!get_maxval_strlen (arg, &len, &visited, type))
1426 len = NULL_TREE;
1427 if (visited)
1428 BITMAP_FREE (visited);
1430 return len;
1434 /* Fold a function call to builtin strcpy with arguments DEST and SRC.
1435    The length of SRC, if needed, is computed via get_maxval_strlen.
1436    Return false if no simplification can be made.  */
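/* Sketch of the transform below: when strlen (SRC) is known to be 3,
   "strcpy (dest, src)" becomes "memcpy (dest, src, 4)", which also
   copies the terminating NUL.  */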
1438 static bool
1439 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1440 tree dest, tree src)
1442 location_t loc = gimple_location (gsi_stmt (*gsi));
1443 tree fn;
1445 /* If SRC and DEST are the same (and not volatile), return DEST. */
1446 if (operand_equal_p (src, dest, 0))
1448 replace_call_with_value (gsi, dest);
1449 return true;
1452 if (optimize_function_for_size_p (cfun))
1453 return false;
1455 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1456 if (!fn)
1457 return false;
1459 tree len = get_maxval_strlen (src, 0);
1460 if (!len)
1461 return false;
1463 len = fold_convert_loc (loc, size_type_node, len);
1464 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1465 len = force_gimple_operand_gsi (gsi, len, true,
1466 NULL_TREE, true, GSI_SAME_STMT);
1467 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1468 replace_call_with_call_and_fold (gsi, repl);
1469 return true;
1472 /* Fold a function call to builtin strncpy with arguments DEST, SRC,
1473    and LEN.  The length of the source string, if needed, is computed via
1474    get_maxval_strlen.  Return false if no simplification can be made.  */
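/* Sketch of the transform below: with SRC of known length 3 and the
   constant LEN == 4 (so LEN <= strlen (SRC) + 1 and no zero padding is
   required), "strncpy (d, s, 4)" becomes "memcpy (d, s, 4)".  */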
1476 static bool
1477 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1478 tree dest, tree src, tree len)
1480 location_t loc = gimple_location (gsi_stmt (*gsi));
1481 tree fn;
1483 /* If the LEN parameter is zero, return DEST. */
1484 if (integer_zerop (len))
1486 replace_call_with_value (gsi, dest);
1487 return true;
1490 /* We can't compare slen with len as constants below if len is not a
1491 constant. */
1492 if (TREE_CODE (len) != INTEGER_CST)
1493 return false;
1495 /* Now, we must be passed a constant src ptr parameter. */
1496 tree slen = get_maxval_strlen (src, 0);
1497 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1498 return false;
1500 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1502 /* We do not support simplification of this case, though we do
1503 support it when expanding trees into RTL. */
1504 /* FIXME: generate a call to __builtin_memset. */
1505 if (tree_int_cst_lt (slen, len))
1506 return false;
1508 /* OK transform into builtin memcpy. */
1509 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1510 if (!fn)
1511 return false;
1513 len = fold_convert_loc (loc, size_type_node, len);
1514 len = force_gimple_operand_gsi (gsi, len, true,
1515 NULL_TREE, true, GSI_SAME_STMT);
1516 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1517 replace_call_with_call_and_fold (gsi, repl);
1518 return true;
1521 /* Simplify a call to the strcat builtin.  DST and SRC are the arguments
1522    to the call.
1524    Return false if no simplification was possible; otherwise replace the
1525    call with a cheaper equivalent (a strlen plus a memcpy, possibly with
1526    an assignment to the lhs) and return true.  */
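/* Sketch of the split performed below, assuming strlen (SRC) is known
   to be the constant N:

       strcat (dst, src);

   becomes, in GIMPLE terms:

       tmp = strlen (dst);
       memcpy (dst + tmp, src, N + 1);  */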
1539 static bool
1540 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1542 gimple stmt = gsi_stmt (*gsi);
1543 location_t loc = gimple_location (stmt);
1545 const char *p = c_getstr (src);
1547 /* If the string length is zero, return the dst parameter. */
1548 if (p && *p == '\0')
1550 replace_call_with_value (gsi, dst);
1551 return true;
1554 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1555 return false;
1557 /* See if we can store by pieces into (dst + strlen(dst)). */
1558 tree newdst;
1559 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1560 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1562 if (!strlen_fn || !memcpy_fn)
1563 return false;
1565   /* If the length of the source string isn't computable, don't
1566      split strcat into strlen and memcpy.  */
1567 tree len = get_maxval_strlen (src, 0);
1568 if (! len)
1569 return false;
1571 /* Create strlen (dst). */
1572 gimple_seq stmts = NULL, stmts2;
1573 gimple repl = gimple_build_call (strlen_fn, 1, dst);
1574 gimple_set_location (repl, loc);
1575 if (gimple_in_ssa_p (cfun))
1576 newdst = make_ssa_name (size_type_node, NULL);
1577 else
1578 newdst = create_tmp_reg (size_type_node, NULL);
1579 gimple_call_set_lhs (repl, newdst);
1580 gimple_seq_add_stmt_without_update (&stmts, repl);
1582 /* Create (dst p+ strlen (dst)). */
1583 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1584 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1585 gimple_seq_add_seq_without_update (&stmts, stmts2);
1587 len = fold_convert_loc (loc, size_type_node, len);
1588 len = size_binop_loc (loc, PLUS_EXPR, len,
1589 build_int_cst (size_type_node, 1));
1590 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1591 gimple_seq_add_seq_without_update (&stmts, stmts2);
1593 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1594 gimple_seq_add_stmt_without_update (&stmts, repl);
1595 if (gimple_call_lhs (stmt))
1597 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1598 gimple_seq_add_stmt_without_update (&stmts, repl);
1599 gsi_replace_with_seq_vops (gsi, stmts);
1600 /* gsi now points at the assignment to the lhs, get a
1601 stmt iterator to the memcpy call.
1602 ??? We can't use gsi_for_stmt as that doesn't work when the
1603 CFG isn't built yet. */
1604 gimple_stmt_iterator gsi2 = *gsi;
1605 gsi_prev (&gsi2);
1606 fold_stmt (&gsi2);
1608 else
1610 gsi_replace_with_seq_vops (gsi, stmts);
1611 fold_stmt (gsi);
1613 return true;
1616 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1617 are the arguments to the call. */
1619 static bool
1620 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1622 gimple stmt = gsi_stmt (*gsi);
1623 tree dest = gimple_call_arg (stmt, 0);
1624 tree src = gimple_call_arg (stmt, 1);
1625 tree size = gimple_call_arg (stmt, 2);
1626 tree fn;
1627 const char *p;
1630 p = c_getstr (src);
1631 /* If the SRC parameter is "", return DEST. */
1632 if (p && *p == '\0')
1634 replace_call_with_value (gsi, dest);
1635 return true;
1638 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1639 return false;
1641 /* If __builtin_strcat_chk is used, assume strcat is available. */
1642 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1643 if (!fn)
1644 return false;
1646 gimple repl = gimple_build_call (fn, 2, dest, src);
1647 replace_call_with_call_and_fold (gsi, repl);
1648 return true;
1651 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1652 LEN, and SIZE. */
1654 static bool
1655 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1657 gimple stmt = gsi_stmt (*gsi);
1658 tree dest = gimple_call_arg (stmt, 0);
1659 tree src = gimple_call_arg (stmt, 1);
1660 tree len = gimple_call_arg (stmt, 2);
1661 tree size = gimple_call_arg (stmt, 3);
1662 tree fn;
1663 const char *p;
1665 p = c_getstr (src);
1666 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1667 if ((p && *p == '\0')
1668 || integer_zerop (len))
1670 replace_call_with_value (gsi, dest);
1671 return true;
1674 if (! tree_fits_uhwi_p (size))
1675 return false;
1677 if (! integer_all_onesp (size))
1679 tree src_len = c_strlen (src, 1);
1680 if (src_len
1681 && tree_fits_uhwi_p (src_len)
1682 && tree_fits_uhwi_p (len)
1683 && ! tree_int_cst_lt (len, src_len))
1685 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1686 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1687 if (!fn)
1688 return false;
1690 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1691 replace_call_with_call_and_fold (gsi, repl);
1692 return true;
1694 return false;
1697 /* If __builtin_strncat_chk is used, assume strncat is available. */
1698 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1699 if (!fn)
1700 return false;
1702 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1703 replace_call_with_call_and_fold (gsi, repl);
1704 return true;
1707 /* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
1708    to the call.  UNLOCKED is true if this is actually a call to
1709    fputs_unlocked.  The known length of the string, if needed, is
1710    computed via get_maxval_strlen.  Return false if no simplification
1711    was possible.  */
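/* Illustrative examples of the cases below: "fputs ("", f)" is removed
   entirely, "fputs ("x", f)" becomes "fputc ('x', f)", and
   "fputs ("abc", f)" becomes "fwrite ("abc", 1, 3, f)" when optimizing
   for speed.  */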
1714 static bool
1715 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1716 tree arg0, tree arg1,
1717 bool unlocked)
1719 gimple stmt = gsi_stmt (*gsi);
1721 /* If we're using an unlocked function, assume the other unlocked
1722 functions exist explicitly. */
1723 tree const fn_fputc = (unlocked
1724 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1725 : builtin_decl_implicit (BUILT_IN_FPUTC));
1726 tree const fn_fwrite = (unlocked
1727 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1728 : builtin_decl_implicit (BUILT_IN_FWRITE));
1730 /* If the return value is used, don't do the transformation. */
1731 if (gimple_call_lhs (stmt))
1732 return false;
1734 /* Get the length of the string passed to fputs. If the length
1735 can't be determined, punt. */
1736 tree len = get_maxval_strlen (arg0, 0);
1737 if (!len
1738 || TREE_CODE (len) != INTEGER_CST)
1739 return false;
1741 switch (compare_tree_int (len, 1))
1743     case -1: /* length is 0, delete the call entirely.  */
1744 replace_call_with_value (gsi, integer_zero_node);
1745 return true;
1747 case 0: /* length is 1, call fputc. */
1749 const char *p = c_getstr (arg0);
1750 if (p != NULL)
1752 if (!fn_fputc)
1753 return false;
1755 gimple repl = gimple_build_call (fn_fputc, 2,
1756 build_int_cst
1757 (integer_type_node, p[0]), arg1);
1758 replace_call_with_call_and_fold (gsi, repl);
1759 return true;
1762 /* FALLTHROUGH */
1763 case 1: /* length is greater than 1, call fwrite. */
1765 /* If optimizing for size keep fputs. */
1766 if (optimize_function_for_size_p (cfun))
1767 return false;
1768 /* New argument list transforming fputs(string, stream) to
1769 fwrite(string, 1, len, stream). */
1770 if (!fn_fwrite)
1771 return false;
1773 gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
1774 size_one_node, len, arg1);
1775 replace_call_with_call_and_fold (gsi, repl);
1776 return true;
1778 default:
1779 gcc_unreachable ();
1781 return false;
1784 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1785    DEST, SRC, LEN, and SIZE are the arguments to the call.
1786    FCODE is the BUILT_IN_* code of the builtin.  The maximum length
1787    passed as the third argument, if needed, is computed via
1788    get_maxval_strlen.  */
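/* Illustrative example (a sketch): "__memcpy_chk (d, s, 8, 16)" with
   constant arguments passes the LEN <= SIZE check below and is folded
   to the plain "memcpy (d, s, 8)".  */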
1790 static bool
1791 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1792 tree dest, tree src, tree len, tree size,
1793 enum built_in_function fcode)
1795 gimple stmt = gsi_stmt (*gsi);
1796 location_t loc = gimple_location (stmt);
1797 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1798 tree fn;
1800 /* If SRC and DEST are the same (and not volatile), return DEST
1801 (resp. DEST+LEN for __mempcpy_chk). */
1802 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1804 if (fcode != BUILT_IN_MEMPCPY_CHK)
1806 replace_call_with_value (gsi, dest);
1807 return true;
1809 else
1811 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
1812 temp = force_gimple_operand_gsi (gsi, temp,
1813 false, NULL_TREE, true,
1814 GSI_SAME_STMT);
1815 replace_call_with_value (gsi, temp);
1816 return true;
1820 if (! tree_fits_uhwi_p (size))
1821 return false;
1823 tree maxlen = get_maxval_strlen (len, 2);
1824 if (! integer_all_onesp (size))
1826 if (! tree_fits_uhwi_p (len))
1828 /* If LEN is not constant, try MAXLEN too.
1829 	     For MAXLEN only allow optimizing into a non-_chk function
1830 	     if SIZE is >= MAXLEN; never convert to __chk_fail ().  */
1831 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1833 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1835 /* (void) __mempcpy_chk () can be optimized into
1836 (void) __memcpy_chk (). */
1837 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1838 if (!fn)
1839 return false;
1841 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1842 replace_call_with_call_and_fold (gsi, repl);
1843 return true;
1845 return false;
1848 else
1849 maxlen = len;
1851 if (tree_int_cst_lt (size, maxlen))
1852 return false;
1855 fn = NULL_TREE;
1856 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1857 mem{cpy,pcpy,move,set} is available. */
1858 switch (fcode)
1860 case BUILT_IN_MEMCPY_CHK:
1861 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1862 break;
1863 case BUILT_IN_MEMPCPY_CHK:
1864 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1865 break;
1866 case BUILT_IN_MEMMOVE_CHK:
1867 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1868 break;
1869 case BUILT_IN_MEMSET_CHK:
1870 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1871 break;
1872 default:
1873 break;
1876 if (!fn)
1877 return false;
1879 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1880 replace_call_with_call_and_fold (gsi, repl);
1881 return true;
1884 /* Fold a call to the __st[rp]cpy_chk builtin.
1885    DEST, SRC, and SIZE are the arguments to the call.
1886    FCODE is the BUILT_IN_* code of the builtin.  The maximum length of
1887    the string passed as the second argument, if needed, is computed via
1888    get_maxval_strlen.  */
1890 static bool
1891 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1892 tree dest,
1893 tree src, tree size,
1894 enum built_in_function fcode)
1896 gimple stmt = gsi_stmt (*gsi);
1897 location_t loc = gimple_location (stmt);
1898 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1899 tree len, fn;
1901 /* If SRC and DEST are the same (and not volatile), return DEST. */
1902 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1904 replace_call_with_value (gsi, dest);
1905 return true;
1908 if (! tree_fits_uhwi_p (size))
1909 return false;
1911 tree maxlen = get_maxval_strlen (src, 1);
1912 if (! integer_all_onesp (size))
1914 len = c_strlen (src, 1);
1915 if (! len || ! tree_fits_uhwi_p (len))
1917 /* If LEN is not constant, try MAXLEN too.
1918 For MAXLEN only allow optimizing into non-_ocs function
1919 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1920 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1922 if (fcode == BUILT_IN_STPCPY_CHK)
1924 if (! ignore)
1925 return false;
1927 /* If return value of __stpcpy_chk is ignored,
1928 optimize into __strcpy_chk. */
1929 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1930 if (!fn)
1931 return false;
1933 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1934 replace_call_with_call_and_fold (gsi, repl);
1935 return true;
1938 if (! len || TREE_SIDE_EFFECTS (len))
1939 return false;
1941 /* If c_strlen returned something, but not a constant,
1942 transform __strcpy_chk into __memcpy_chk. */
1943 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1944 if (!fn)
1945 return false;
1947 len = fold_convert_loc (loc, size_type_node, len);
1948 len = size_binop_loc (loc, PLUS_EXPR, len,
1949 build_int_cst (size_type_node, 1));
1950 len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
1951 true, GSI_SAME_STMT);
1952 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1953 replace_call_with_call_and_fold (gsi, repl);
1954 return true;
1957 else
1958 maxlen = len;
1960 if (! tree_int_cst_lt (maxlen, size))
1961 return false;
1964 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1965 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1966 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1967 if (!fn)
1968 return false;
1970 gimple repl = gimple_build_call (fn, 2, dest, src);
1971 replace_call_with_call_and_fold (gsi, repl);
1972 return true;
1975 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1976 are the arguments to the call. Whether the return value can be
1977 ignored and the maximum length of LEN are determined from the call
1978 statement itself. FCODE is the BUILT_IN_* code of the builtin. */
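/* Illustrative example (added; not in the original source): with the
   constant LEN 8 no larger than the constant SIZE 16,
     __builtin___strncpy_chk (d, s, 8, 16);
   folds to
     __builtin_strncpy (d, s, 8);  */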
1980 static bool
1981 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1982 tree dest, tree src,
1983 tree len, tree size,
1984 enum built_in_function fcode)
1986 gimple stmt = gsi_stmt (*gsi);
1987 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1988 tree fn;
1990 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1992 /* If return value of __stpncpy_chk is ignored,
1993 optimize into __strncpy_chk. */
1994 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1995 if (fn)
1997 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1998 replace_call_with_call_and_fold (gsi, repl);
1999 return true;
2003 if (! tree_fits_uhwi_p (size))
2004 return false;
2006 tree maxlen = get_maxval_strlen (len, 2);
2007 if (! integer_all_onesp (size))
2009 if (! tree_fits_uhwi_p (len))
2011 /* If LEN is not constant, try MAXLEN too.
2012 For MAXLEN only allow optimizing into non-_ocs function
2013 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2014 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2015 return false;
2017 else
2018 maxlen = len;
2020 if (tree_int_cst_lt (size, maxlen))
2021 return false;
2024 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2025 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2026 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2027 if (!fn)
2028 return false;
2030 gimple repl = gimple_build_call (fn, 3, dest, src, len);
2031 replace_call_with_call_and_fold (gsi, repl);
2032 return true;
2035 /* Fold a call to __{,v}snprintf_chk at *GSI. Return true if the
2036 statement was simplified, false if a normal call should be emitted
2037 instead. FCODE is either BUILT_IN_SNPRINTF_CHK or
2038 BUILT_IN_VSNPRINTF_CHK. */
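/* Illustrative example (added; not in the original source): with the
   maximum length 8 known to be below the object size 32 and FLAG 0,
     __builtin___snprintf_chk (buf, 8, 0, 32, "%d", i);
   folds to
     __builtin_snprintf (buf, 8, "%d", i);  */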
2041 static bool
2042 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2043 enum built_in_function fcode)
2045 gimple stmt = gsi_stmt (*gsi);
2046 tree dest, size, len, fn, fmt, flag;
2047 const char *fmt_str;
2049 /* Verify the required arguments in the original call. */
2050 if (gimple_call_num_args (stmt) < 5)
2051 return false;
2053 dest = gimple_call_arg (stmt, 0);
2054 len = gimple_call_arg (stmt, 1);
2055 flag = gimple_call_arg (stmt, 2);
2056 size = gimple_call_arg (stmt, 3);
2057 fmt = gimple_call_arg (stmt, 4);
2059 if (! tree_fits_uhwi_p (size))
2060 return false;
2062 if (! integer_all_onesp (size))
2064 tree maxlen = get_maxval_strlen (len, 2);
2065 if (! tree_fits_uhwi_p (len))
2067 /* If LEN is not constant, try MAXLEN too.
2068 For MAXLEN only allow optimizing into non-_ocs function
2069 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2070 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2071 return false;
2073 else
2074 maxlen = len;
2076 if (tree_int_cst_lt (size, maxlen))
2077 return false;
2080 if (!init_target_chars ())
2081 return false;
2083 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2084 or if format doesn't contain % chars or is "%s". */
2085 if (! integer_zerop (flag))
2087 fmt_str = c_getstr (fmt);
2088 if (fmt_str == NULL)
2089 return false;
2090 if (strchr (fmt_str, target_percent) != NULL
2091 && strcmp (fmt_str, target_percent_s))
2092 return false;
2095 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2096 available. */
2097 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2098 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2099 if (!fn)
2100 return false;
2102 /* Replace the called function and the first 5 arguments by 3,
2103 retaining the trailing varargs. */
2104 gimple_call_set_fndecl (stmt, fn);
2105 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2106 gimple_call_set_arg (stmt, 0, dest);
2107 gimple_call_set_arg (stmt, 1, len);
2108 gimple_call_set_arg (stmt, 2, fmt);
2109 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2110 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2111 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2112 fold_stmt (gsi);
2113 return true;
2116 /* Fold a call to __{,v}sprintf_chk at *GSI. Return true if the
2117 statement was simplified, false if a normal call should be
2118 emitted instead. FCODE is either BUILT_IN_SPRINTF_CHK
2119 or BUILT_IN_VSPRINTF_CHK. */
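/* Illustrative example (added; not in the original source): the format
   "hi" contains no '%' and its length 2 is below the object size 32, so
     __builtin___sprintf_chk (buf, 0, 32, "hi");
   folds to
     __builtin_sprintf (buf, "hi");  */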
2121 static bool
2122 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2123 enum built_in_function fcode)
2125 gimple stmt = gsi_stmt (*gsi);
2126 tree dest, size, len, fn, fmt, flag;
2127 const char *fmt_str;
2128 unsigned nargs = gimple_call_num_args (stmt);
2130 /* Verify the required arguments in the original call. */
2131 if (nargs < 4)
2132 return false;
2133 dest = gimple_call_arg (stmt, 0);
2134 flag = gimple_call_arg (stmt, 1);
2135 size = gimple_call_arg (stmt, 2);
2136 fmt = gimple_call_arg (stmt, 3);
2138 if (! tree_fits_uhwi_p (size))
2139 return false;
2141 len = NULL_TREE;
2143 if (!init_target_chars ())
2144 return false;
2146 /* Check whether the format is a literal string constant. */
2147 fmt_str = c_getstr (fmt);
2148 if (fmt_str != NULL)
2150 /* If the format doesn't contain % args or %%, we know the size. */
2151 if (strchr (fmt_str, target_percent) == 0)
2153 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2154 len = build_int_cstu (size_type_node, strlen (fmt_str));
2156 /* If the format is "%s" and first ... argument is a string literal,
2157 we know the size too. */
2158 else if (fcode == BUILT_IN_SPRINTF_CHK
2159 && strcmp (fmt_str, target_percent_s) == 0)
2161 tree arg;
2163 if (nargs == 5)
2165 arg = gimple_call_arg (stmt, 4);
2166 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2168 len = c_strlen (arg, 1);
2169 if (! len || ! tree_fits_uhwi_p (len))
2170 len = NULL_TREE;
2176 if (! integer_all_onesp (size))
2178 if (! len || ! tree_int_cst_lt (len, size))
2179 return false;
2182 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2183 or if format doesn't contain % chars or is "%s". */
2184 if (! integer_zerop (flag))
2186 if (fmt_str == NULL)
2187 return false;
2188 if (strchr (fmt_str, target_percent) != NULL
2189 && strcmp (fmt_str, target_percent_s))
2190 return false;
2193 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2194 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2195 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2196 if (!fn)
2197 return false;
2199 /* Replace the called function and the first 4 arguments by 2,
2200 retaining the trailing varargs. */
2201 gimple_call_set_fndecl (stmt, fn);
2202 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2203 gimple_call_set_arg (stmt, 0, dest);
2204 gimple_call_set_arg (stmt, 1, fmt);
2205 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2206 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2207 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2208 fold_stmt (gsi);
2209 return true;
2212 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2213 ORIG may be null if this is a 2-argument call. We don't attempt to
2214 simplify calls with more than 3 arguments.
2216 Return true if the call was simplified in place, false otherwise.
2217 Whether the caller uses the return value of sprintf is determined
2218 from the call statement itself. */
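/* Illustrative examples (added; not in the original source):
     sprintf (buf, "hello");      =>  strcpy (buf, "hello");
     n = sprintf (buf, "hello");  =>  strcpy (buf, "hello"); n = 5;
     sprintf (buf, "%s", s);      =>  strcpy (buf, s);
   the last form keeps a used return value only when the length of S
   is known.  */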
2220 static bool
2221 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2223 gimple stmt = gsi_stmt (*gsi);
2224 tree dest = gimple_call_arg (stmt, 0);
2225 tree fmt = gimple_call_arg (stmt, 1);
2226 tree orig = NULL_TREE;
2227 const char *fmt_str = NULL;
2229 /* Verify the required arguments in the original call. We deal with two
2230 types of sprintf() calls: 'sprintf (str, fmt)' and
2231 'sprintf (dest, "%s", orig)'. */
2232 if (gimple_call_num_args (stmt) > 3)
2233 return false;
2235 if (gimple_call_num_args (stmt) == 3)
2236 orig = gimple_call_arg (stmt, 2);
2238 /* Check whether the format is a literal string constant. */
2239 fmt_str = c_getstr (fmt);
2240 if (fmt_str == NULL)
2241 return false;
2243 if (!init_target_chars ())
2244 return false;
2246 /* If the format doesn't contain % args or %%, use strcpy. */
2247 if (strchr (fmt_str, target_percent) == NULL)
2249 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2251 if (!fn)
2252 return false;
2254 /* Don't optimize sprintf (buf, "abc", ptr++). */
2255 if (orig)
2256 return false;
2258 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2259 'format' is known to contain no % formats. */
2260 gimple_seq stmts = NULL;
2261 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2262 gimple_seq_add_stmt_without_update (&stmts, repl);
2263 if (gimple_call_lhs (stmt))
2265 repl = gimple_build_assign (gimple_call_lhs (stmt),
2266 build_int_cst (integer_type_node,
2267 strlen (fmt_str)));
2268 gimple_seq_add_stmt_without_update (&stmts, repl);
2269 gsi_replace_with_seq_vops (gsi, stmts);
2270 /* gsi now points at the assignment to the lhs; get a
2271 stmt iterator to the inserted strcpy call.
2272 ??? We can't use gsi_for_stmt as that doesn't work when the
2273 CFG isn't built yet. */
2274 gimple_stmt_iterator gsi2 = *gsi;
2275 gsi_prev (&gsi2);
2276 fold_stmt (&gsi2);
2278 else
2280 gsi_replace_with_seq_vops (gsi, stmts);
2281 fold_stmt (gsi);
2283 return true;
2286 /* If the format is "%s", use strcpy; a used return value also requires the length of ORIG to be known. */
2287 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2289 tree fn;
2290 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2292 if (!fn)
2293 return false;
2295 /* Don't crash on sprintf (str1, "%s"). */
2296 if (!orig)
2297 return false;
2299 tree orig_len = NULL_TREE;
2300 if (gimple_call_lhs (stmt))
2302 orig_len = get_maxval_strlen (orig, 0);
2303 if (!orig_len)
2304 return false;
2307 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2308 gimple_seq stmts = NULL;
2309 gimple repl = gimple_build_call (fn, 2, dest, orig);
2310 gimple_seq_add_stmt_without_update (&stmts, repl);
2311 if (gimple_call_lhs (stmt))
2313 if (!useless_type_conversion_p (integer_type_node,
2314 TREE_TYPE (orig_len)))
2315 orig_len = fold_convert (integer_type_node, orig_len);
2316 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2317 gimple_seq_add_stmt_without_update (&stmts, repl);
2318 gsi_replace_with_seq_vops (gsi, stmts);
2319 /* gsi now points at the assignment to the lhs; get a
2320 stmt iterator to the inserted strcpy call.
2321 ??? We can't use gsi_for_stmt as that doesn't work when the
2322 CFG isn't built yet. */
2323 gimple_stmt_iterator gsi2 = *gsi;
2324 gsi_prev (&gsi2);
2325 fold_stmt (&gsi2);
2327 else
2329 gsi_replace_with_seq_vops (gsi, stmts);
2330 fold_stmt (gsi);
2332 return true;
2334 return false;
2337 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2338 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2339 attempt to simplify calls with more than 4 arguments.
2341 Return true if the call was simplified in place, false otherwise.
2342 Whether the caller uses the return value of snprintf is determined
2343 from the call statement itself. */
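/* Illustrative example (added; not in the original source): the format
   length 5 is known to fit the destination size 32, so
     snprintf (buf, 32, "hello");
   folds to
     strcpy (buf, "hello");
   with a used return value replaced by the constant 5.  */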
2345 static bool
2346 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2348 gimple stmt = gsi_stmt (*gsi);
2349 tree dest = gimple_call_arg (stmt, 0);
2350 tree destsize = gimple_call_arg (stmt, 1);
2351 tree fmt = gimple_call_arg (stmt, 2);
2352 tree orig = NULL_TREE;
2353 const char *fmt_str = NULL;
2355 if (gimple_call_num_args (stmt) > 4)
2356 return false;
2358 if (gimple_call_num_args (stmt) == 4)
2359 orig = gimple_call_arg (stmt, 3);
2361 if (!tree_fits_uhwi_p (destsize))
2362 return false;
2363 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2365 /* Check whether the format is a literal string constant. */
2366 fmt_str = c_getstr (fmt);
2367 if (fmt_str == NULL)
2368 return false;
2370 if (!init_target_chars ())
2371 return false;
2373 /* If the format doesn't contain % args or %%, use strcpy. */
2374 if (strchr (fmt_str, target_percent) == NULL)
2376 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2377 if (!fn)
2378 return false;
2380 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2381 if (orig)
2382 return false;
2384 /* We could expand this as
2385 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2386 or to
2387 memcpy (str, fmt_with_nul_at_cstm1, cst);
2388 but in the former case that might increase code size
2389 and in the latter case grow .rodata section too much.
2390 So punt for now. */
2391 size_t len = strlen (fmt_str);
2392 if (len >= destlen)
2393 return false;
2395 gimple_seq stmts = NULL;
2396 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2397 gimple_seq_add_stmt_without_update (&stmts, repl);
2398 if (gimple_call_lhs (stmt))
2400 repl = gimple_build_assign (gimple_call_lhs (stmt),
2401 build_int_cst (integer_type_node, len));
2402 gimple_seq_add_stmt_without_update (&stmts, repl);
2403 gsi_replace_with_seq_vops (gsi, stmts);
2404 /* gsi now points at the assignment to the lhs; get a
2405 stmt iterator to the inserted strcpy call.
2406 ??? We can't use gsi_for_stmt as that doesn't work when the
2407 CFG isn't built yet. */
2408 gimple_stmt_iterator gsi2 = *gsi;
2409 gsi_prev (&gsi2);
2410 fold_stmt (&gsi2);
2412 else
2414 gsi_replace_with_seq_vops (gsi, stmts);
2415 fold_stmt (gsi);
2417 return true;
2420 /* If the format is "%s", use strcpy when the length of ORIG is known to fit the destination. */
2421 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2423 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2424 if (!fn)
2425 return false;
2427 /* Don't crash on snprintf (str1, cst, "%s"). */
2428 if (!orig)
2429 return false;
2431 tree orig_len = get_maxval_strlen (orig, 0);
2432 if (!orig_len)
2433 return false;
2435 /* We could expand this as
2436 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2437 or to
2438 memcpy (str1, str2_with_nul_at_cstm1, cst);
2439 but in the former case that might increase code size
2440 and in the latter case grow .rodata section too much.
2441 So punt for now. */
2442 if (compare_tree_int (orig_len, destlen) >= 0)
2443 return false;
2445 /* Convert snprintf (str1, cst, "%s", str2) into
2446 strcpy (str1, str2) if strlen (str2) < cst. */
2447 gimple_seq stmts = NULL;
2448 gimple repl = gimple_build_call (fn, 2, dest, orig);
2449 gimple_seq_add_stmt_without_update (&stmts, repl);
2450 if (gimple_call_lhs (stmt))
2452 if (!useless_type_conversion_p (integer_type_node,
2453 TREE_TYPE (orig_len)))
2454 orig_len = fold_convert (integer_type_node, orig_len);
2455 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2456 gimple_seq_add_stmt_without_update (&stmts, repl);
2457 gsi_replace_with_seq_vops (gsi, stmts);
2458 /* gsi now points at the assignment to the lhs; get a
2459 stmt iterator to the inserted strcpy call.
2460 ??? We can't use gsi_for_stmt as that doesn't work when the
2461 CFG isn't built yet. */
2462 gimple_stmt_iterator gsi2 = *gsi;
2463 gsi_prev (&gsi2);
2464 fold_stmt (&gsi2);
2466 else
2468 gsi_replace_with_seq_vops (gsi, stmts);
2469 fold_stmt (gsi);
2471 return true;
2473 return false;
2477 /* Fold a call to __builtin_strlen whose argument has a known string length. */
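/* Illustrative example (added; not in the original source):
     n = __builtin_strlen ("abc");   =>   n = 3;  */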
2479 static bool
2480 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2482 gimple stmt = gsi_stmt (*gsi);
2483 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2484 if (!len)
2485 return false;
2486 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2487 replace_call_with_value (gsi, len);
2488 return true;
2492 /* Fold the non-target builtin at *GSI and return whether any simplification
2493 was made. */
2495 static bool
2496 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2498 gimple stmt = gsi_stmt (*gsi);
2499 tree callee = gimple_call_fndecl (stmt);
2501 /* Give up for always_inline inline builtins until they are
2502 inlined. */
2503 if (avoid_folding_inline_builtin (callee))
2504 return false;
2506 switch (DECL_FUNCTION_CODE (callee))
2508 case BUILT_IN_BZERO:
2509 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2510 gimple_call_arg (stmt, 1));
2511 case BUILT_IN_MEMSET:
2512 return gimple_fold_builtin_memset (gsi,
2513 gimple_call_arg (stmt, 1),
2514 gimple_call_arg (stmt, 2));
2515 case BUILT_IN_BCOPY:
2516 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2517 gimple_call_arg (stmt, 0), 3);
2518 case BUILT_IN_MEMCPY:
2519 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2520 gimple_call_arg (stmt, 1), 0);
2521 case BUILT_IN_MEMPCPY:
2522 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2523 gimple_call_arg (stmt, 1), 1);
2524 case BUILT_IN_MEMMOVE:
2525 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2526 gimple_call_arg (stmt, 1), 3);
2527 case BUILT_IN_SPRINTF_CHK:
2528 case BUILT_IN_VSPRINTF_CHK:
2529 return gimple_fold_builtin_sprintf_chk (gsi, DECL_FUNCTION_CODE (callee));
2530 case BUILT_IN_STRCAT_CHK:
2531 return gimple_fold_builtin_strcat_chk (gsi);
2532 case BUILT_IN_STRNCAT_CHK:
2533 return gimple_fold_builtin_strncat_chk (gsi);
2534 case BUILT_IN_STRLEN:
2535 return gimple_fold_builtin_strlen (gsi);
2536 case BUILT_IN_STRCPY:
2537 return gimple_fold_builtin_strcpy (gsi,
2538 gimple_call_arg (stmt, 0),
2539 gimple_call_arg (stmt, 1));
2540 case BUILT_IN_STRNCPY:
2541 return gimple_fold_builtin_strncpy (gsi,
2542 gimple_call_arg (stmt, 0),
2543 gimple_call_arg (stmt, 1),
2544 gimple_call_arg (stmt, 2));
2545 case BUILT_IN_STRCAT:
2546 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2547 gimple_call_arg (stmt, 1));
2548 case BUILT_IN_FPUTS:
2549 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2550 gimple_call_arg (stmt, 1), false);
2551 case BUILT_IN_FPUTS_UNLOCKED:
2552 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2553 gimple_call_arg (stmt, 1), true);
2554 case BUILT_IN_MEMCPY_CHK:
2555 case BUILT_IN_MEMPCPY_CHK:
2556 case BUILT_IN_MEMMOVE_CHK:
2557 case BUILT_IN_MEMSET_CHK:
2558 return gimple_fold_builtin_memory_chk (gsi,
2559 gimple_call_arg (stmt, 0),
2560 gimple_call_arg (stmt, 1),
2561 gimple_call_arg (stmt, 2),
2562 gimple_call_arg (stmt, 3),
2563 DECL_FUNCTION_CODE (callee));
2564 case BUILT_IN_STRCPY_CHK:
2565 case BUILT_IN_STPCPY_CHK:
2566 return gimple_fold_builtin_stxcpy_chk (gsi,
2567 gimple_call_arg (stmt, 0),
2568 gimple_call_arg (stmt, 1),
2569 gimple_call_arg (stmt, 2),
2570 DECL_FUNCTION_CODE (callee));
2571 case BUILT_IN_STRNCPY_CHK:
2572 case BUILT_IN_STPNCPY_CHK:
2573 return gimple_fold_builtin_stxncpy_chk (gsi,
2574 gimple_call_arg (stmt, 0),
2575 gimple_call_arg (stmt, 1),
2576 gimple_call_arg (stmt, 2),
2577 gimple_call_arg (stmt, 3),
2578 DECL_FUNCTION_CODE (callee));
2579 case BUILT_IN_SNPRINTF_CHK:
2580 case BUILT_IN_VSNPRINTF_CHK:
2581 return gimple_fold_builtin_snprintf_chk (gsi,
2582 DECL_FUNCTION_CODE (callee));
2583 case BUILT_IN_SNPRINTF:
2584 return gimple_fold_builtin_snprintf (gsi);
2585 case BUILT_IN_SPRINTF:
2586 return gimple_fold_builtin_sprintf (gsi);
2587 default:;
2590 /* Try the generic builtin folder. */
2591 bool ignore = (gimple_call_lhs (stmt) == NULL);
2592 tree result = fold_call_stmt (stmt, ignore);
2593 if (result)
2595 if (ignore)
2596 STRIP_NOPS (result);
2597 else
2598 result = fold_convert (gimple_call_return_type (stmt), result);
2599 if (!update_call_from_tree (gsi, result))
2600 gimplify_and_update_call_from_tree (gsi, result);
2601 return true;
2604 return false;
2607 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2608 The statement may be replaced by another statement, e.g., if the call
2609 simplifies to a constant value. Return true if any changes were made.
2610 It is assumed that the operands have been previously folded. */
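/* Illustrative example (added; not in the original source): a C++
   virtual call through an OBJ_TYPE_REF with exactly one possible
   target is rewritten into a direct call to that target; with no
   possible target at all it becomes a call to
   __builtin_unreachable ().  */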
2612 static bool
2613 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2615 gimple stmt = gsi_stmt (*gsi);
2616 tree callee;
2617 bool changed = false;
2618 unsigned i;
2620 /* Fold *& in call arguments. */
2621 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2622 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2624 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2625 if (tmp)
2627 gimple_call_set_arg (stmt, i, tmp);
2628 changed = true;
2632 /* Check for virtual calls that became direct calls. */
2633 callee = gimple_call_fn (stmt);
2634 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
2636 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
2638 if (dump_file && virtual_method_call_p (callee)
2639 && !possible_polymorphic_call_target_p
2640 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
2641 (OBJ_TYPE_REF_EXPR (callee)))))
2643 fprintf (dump_file,
2644 "Type inheritance inconsistent devirtualization of ");
2645 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2646 fprintf (dump_file, " to ");
2647 print_generic_expr (dump_file, callee, TDF_SLIM);
2648 fprintf (dump_file, "\n");
2651 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
2652 changed = true;
2654 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
2656 bool final;
2657 vec <cgraph_node *>targets
2658 = possible_polymorphic_call_targets (callee, stmt, &final);
2659 if (final && targets.length () <= 1 && dbg_cnt (devirt))
2661 tree lhs = gimple_call_lhs (stmt);
2662 if (dump_enabled_p ())
2664 location_t loc = gimple_location_safe (stmt);
2665 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2666 "folding virtual function call to %s\n",
2667 targets.length () == 1
2668 ? targets[0]->name ()
2669 : "__builtin_unreachable");
2671 if (targets.length () == 1)
2673 gimple_call_set_fndecl (stmt, targets[0]->decl);
2674 changed = true;
2675 /* If the call becomes noreturn, remove the lhs. */
2676 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
2678 if (TREE_CODE (lhs) == SSA_NAME)
2680 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
2681 tree def = get_or_create_ssa_default_def (cfun, var);
2682 gimple new_stmt = gimple_build_assign (lhs, def);
2683 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
2685 gimple_call_set_lhs (stmt, NULL_TREE);
2688 else
2690 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2691 gimple new_stmt = gimple_build_call (fndecl, 0);
2692 gimple_set_location (new_stmt, gimple_location (stmt));
2693 if (lhs && TREE_CODE (lhs) == SSA_NAME)
2695 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
2696 tree def = get_or_create_ssa_default_def (cfun, var);
2698 /* To satisfy condition for
2699 cgraph_update_edges_for_call_stmt_node,
2700 we need to preserve the GIMPLE_CALL statement
2701 at the position of the GSI iterator. */
2702 update_call_from_tree (gsi, def);
2703 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
2705 else
2707 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2708 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2709 gsi_replace (gsi, new_stmt, false);
2711 return true;
2717 if (inplace)
2718 return changed;
2720 /* Check for builtins that CCP can handle using information not
2721 available in the generic fold routines. */
2722 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2724 if (gimple_fold_builtin (gsi))
2725 changed = true;
2727 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
2729 changed |= targetm.gimple_fold_builtin (gsi);
2731 else if (gimple_call_internal_p (stmt))
2733 enum tree_code subcode = ERROR_MARK;
2734 tree result = NULL_TREE;
2735 switch (gimple_call_internal_fn (stmt))
2737 case IFN_BUILTIN_EXPECT:
2738 result = fold_builtin_expect (gimple_location (stmt),
2739 gimple_call_arg (stmt, 0),
2740 gimple_call_arg (stmt, 1),
2741 gimple_call_arg (stmt, 2));
2742 break;
2743 case IFN_UBSAN_OBJECT_SIZE:
2744 if (integer_all_onesp (gimple_call_arg (stmt, 2))
2745 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
2746 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
2747 && tree_int_cst_le (gimple_call_arg (stmt, 1),
2748 gimple_call_arg (stmt, 2))))
2750 gsi_replace (gsi, gimple_build_nop (), true);
2751 unlink_stmt_vdef (stmt);
2752 release_defs (stmt);
2753 return true;
2755 break;
2756 case IFN_UBSAN_CHECK_ADD:
2757 subcode = PLUS_EXPR;
2758 break;
2759 case IFN_UBSAN_CHECK_SUB:
2760 subcode = MINUS_EXPR;
2761 break;
2762 case IFN_UBSAN_CHECK_MUL:
2763 subcode = MULT_EXPR;
2764 break;
2765 default:
2766 break;
2768 if (subcode != ERROR_MARK)
2770 tree arg0 = gimple_call_arg (stmt, 0);
2771 tree arg1 = gimple_call_arg (stmt, 1);
2772 /* x = y + 0; x = y - 0; x = y * 0; */
2773 if (integer_zerop (arg1))
2774 result = subcode == MULT_EXPR
2775 ? build_zero_cst (TREE_TYPE (arg0))
2776 : arg0;
2777 /* x = 0 + y; x = 0 * y; */
2778 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
2779 result = subcode == MULT_EXPR
2780 ? build_zero_cst (TREE_TYPE (arg0))
2781 : arg1;
2782 /* x = y - y; */
2783 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
2784 result = build_zero_cst (TREE_TYPE (arg0));
2785 /* x = y * 1; x = 1 * y; */
2786 else if (subcode == MULT_EXPR)
2788 if (integer_onep (arg1))
2789 result = arg0;
2790 else if (integer_onep (arg0))
2791 result = arg1;
2794 if (result)
2796 if (!update_call_from_tree (gsi, result))
2797 gimplify_and_update_call_from_tree (gsi, result);
2798 changed = true;
2802 return changed;
2806 /* Worker for fold_stmt_1's dispatch to pattern-based folding with
2807 gimple_simplify.
2809 Replaces *GSI with the simplification result in RCODE and OPS
2810 and the associated statements in *SEQ. Does the replacement
2811 according to INPLACE and returns true if the operation succeeded. */
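/* Illustrative example (added; not in the original source): if
   gimple_simplify reduces the condition of
     if (a_1 != a_1) ...
   to the integer constant 0, the GIMPLE_COND is rewritten via
   gimple_cond_make_false; a comparison result is installed with
   gimple_cond_set_condition instead.  */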
2813 static bool
2814 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
2815 code_helper rcode, tree *ops,
2816 gimple_seq *seq, bool inplace)
2818 gimple stmt = gsi_stmt (*gsi);
2820 /* Play safe and do not allow abnormals to be mentioned in
2821 newly created statements. See also maybe_push_res_to_seq. */
2822 if ((TREE_CODE (ops[0]) == SSA_NAME
2823 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
2824 || (ops[1]
2825 && TREE_CODE (ops[1]) == SSA_NAME
2826 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
2827 || (ops[2]
2828 && TREE_CODE (ops[2]) == SSA_NAME
2829 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
2830 return false;
2832 if (gimple_code (stmt) == GIMPLE_COND)
2834 gcc_assert (rcode.is_tree_code ());
2835 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
2836 /* GIMPLE_CONDs condition may not throw. */
2837 && (!flag_exceptions
2838 || !cfun->can_throw_non_call_exceptions
2839 || !operation_could_trap_p (rcode,
2840 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
2841 false, NULL_TREE)))
2842 gimple_cond_set_condition (stmt, rcode, ops[0], ops[1]);
2843 else if (rcode == SSA_NAME)
2844 gimple_cond_set_condition (stmt, NE_EXPR, ops[0],
2845 build_zero_cst (TREE_TYPE (ops[0])));
2846 else if (rcode == INTEGER_CST)
2848 if (integer_zerop (ops[0]))
2849 gimple_cond_make_false (stmt);
2850 else
2851 gimple_cond_make_true (stmt);
2853 else if (!inplace)
2855 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
2856 ops, seq);
2857 if (!res)
2858 return false;
2859 gimple_cond_set_condition (stmt, NE_EXPR, res,
2860 build_zero_cst (TREE_TYPE (res)));
2862 else
2863 return false;
2864 if (dump_file && (dump_flags & TDF_DETAILS))
2866 fprintf (dump_file, "gimple_simplified to ");
2867 if (!gimple_seq_empty_p (*seq))
2868 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
2869 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
2870 0, TDF_SLIM);
2872 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
2873 return true;
2875 else if (is_gimple_assign (stmt)
2876 && rcode.is_tree_code ())
2878 if (!inplace
2879 || gimple_num_ops (stmt) <= get_gimple_rhs_num_ops (rcode))
2881 maybe_build_generic_op (rcode,
2882 TREE_TYPE (gimple_assign_lhs (stmt)),
2883 &ops[0], ops[1], ops[2]);
2884 gimple_assign_set_rhs_with_ops_1 (gsi, rcode,
2885 ops[0], ops[1], ops[2]);
2886 if (dump_file && (dump_flags & TDF_DETAILS))
2888 fprintf (dump_file, "gimple_simplified to ");
2889 if (!gimple_seq_empty_p (*seq))
2890 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
2891 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
2892 0, TDF_SLIM);
2894 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
2895 return true;
2898 else if (!inplace)
2900 if (gimple_has_lhs (stmt))
2902 tree lhs = gimple_get_lhs (stmt);
2903 maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
2904 ops, seq, lhs);
2905 if (dump_file && (dump_flags & TDF_DETAILS))
2907 fprintf (dump_file, "gimple_simplified to ");
2908 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
2910 gsi_replace_with_seq_vops (gsi, *seq);
2911 return true;
2913 else
2914 gcc_unreachable ();
2917 return false;
2920 /* Canonicalize MEM_REFs invariant address operand after propagation. */
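/* Illustrative example (added; not in the original source): after
   propagating the invariant address &a,
     MEM[(int *)&a, 0]
   is canonicalized back to the plain reference
     a
   provided volatility, TBAA behavior and alignment agree.  */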
2922 static bool
2923 maybe_canonicalize_mem_ref_addr (tree *t)
2925 bool res = false;
2927 if (TREE_CODE (*t) == ADDR_EXPR)
2928 t = &TREE_OPERAND (*t, 0);
2930 while (handled_component_p (*t))
2931 t = &TREE_OPERAND (*t, 0);
2933 /* Canonicalize MEM [&foo.bar, 0], which appears after propagation
2934 of an invariant address into an SSA name MEM_REF address. */
2935 if (TREE_CODE (*t) == MEM_REF
2936 || TREE_CODE (*t) == TARGET_MEM_REF)
2938 tree addr = TREE_OPERAND (*t, 0);
2939 if (TREE_CODE (addr) == ADDR_EXPR
2940 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
2941 || handled_component_p (TREE_OPERAND (addr, 0))))
2943 tree base;
2944 HOST_WIDE_INT coffset;
2945 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
2946 &coffset);
2947 if (!base)
2948 gcc_unreachable ();
2950 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
2951 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
2952 TREE_OPERAND (*t, 1),
2953 size_int (coffset));
2954 res = true;
2956 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
2957 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
2960 /* Canonicalize back MEM_REFs to plain reference trees if the object
2961 accessed is a decl that has the same access semantics as the MEM_REF. */
2962 if (TREE_CODE (*t) == MEM_REF
2963 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
2964 && integer_zerop (TREE_OPERAND (*t, 1)))
2966 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
2967 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
2968 if (/* Same volatile qualification. */
2969 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
2970 /* Same TBAA behavior with -fstrict-aliasing. */
2971 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
2972 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
2973 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
2974 /* Same alignment. */
2975 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
2976 /* We have to look out here to not drop a required conversion
2977 from the rhs to the lhs if *t appears on the lhs or vice-versa
2978 if it appears on the rhs. Thus require strict type
2979 compatibility. */
2980 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
2982 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
2983 res = true;
2987 /* Canonicalize TARGET_MEM_REF in particular with respect to
2988 the indexes becoming constant. */
2989 else if (TREE_CODE (*t) == TARGET_MEM_REF)
2991 tree tem = maybe_fold_tmr (*t);
2992 if (tem)
2994 *t = tem;
2995 res = true;
2999 return res;
3002 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3003 distinguishes both cases. */
3005 static bool
3006 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3008 bool changed = false;
3009 gimple stmt = gsi_stmt (*gsi);
3010 unsigned i;
3012 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3013 after propagation.
3014 ??? This shouldn't be done in generic folding but in the
3015 propagation helpers which also know whether an address was
3016 propagated. */
3017 switch (gimple_code (stmt))
3019 case GIMPLE_ASSIGN:
3020 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3022 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3023 if ((REFERENCE_CLASS_P (*rhs)
3024 || TREE_CODE (*rhs) == ADDR_EXPR)
3025 && maybe_canonicalize_mem_ref_addr (rhs))
3026 changed = true;
3027 tree *lhs = gimple_assign_lhs_ptr (stmt);
3028 if (REFERENCE_CLASS_P (*lhs)
3029 && maybe_canonicalize_mem_ref_addr (lhs))
3030 changed = true;
3032 break;
3033 case GIMPLE_CALL:
3035 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3037 tree *arg = gimple_call_arg_ptr (stmt, i);
3038 if (REFERENCE_CLASS_P (*arg)
3039 && maybe_canonicalize_mem_ref_addr (arg))
3040 changed = true;
3042 tree *lhs = gimple_call_lhs_ptr (stmt);
3043 if (*lhs
3044 && REFERENCE_CLASS_P (*lhs)
3045 && maybe_canonicalize_mem_ref_addr (lhs))
3046 changed = true;
3047 break;
3049 case GIMPLE_ASM:
3051 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3053 tree link = gimple_asm_output_op (stmt, i);
3054 tree op = TREE_VALUE (link);
3055 if (REFERENCE_CLASS_P (op)
3056 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3057 changed = true;
3059 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3061 tree link = gimple_asm_input_op (stmt, i);
3062 tree op = TREE_VALUE (link);
3063 if ((REFERENCE_CLASS_P (op)
3064 || TREE_CODE (op) == ADDR_EXPR)
3065 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3066 changed = true;
3069 break;
3070 case GIMPLE_DEBUG:
3071 if (gimple_debug_bind_p (stmt))
3073 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3074 if (*val
3075 && (REFERENCE_CLASS_P (*val)
3076 || TREE_CODE (*val) == ADDR_EXPR)
3077 && maybe_canonicalize_mem_ref_addr (val))
3078 changed = true;
3080 break;
3081 default:;
3084 /* Dispatch to pattern-based folding. */
3085 if (!inplace
3086 || is_gimple_assign (stmt)
3087 || gimple_code (stmt) == GIMPLE_COND)
3089 gimple_seq seq = NULL;
3090 code_helper rcode;
3091 tree ops[3] = {};
3092 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
3094 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3095 changed = true;
3096 else
3097 gimple_seq_discard (seq);
3101 stmt = gsi_stmt (*gsi);
3103 /* Fold the main computation performed by the statement. */
3104 switch (gimple_code (stmt))
3106 case GIMPLE_ASSIGN:
3108 unsigned old_num_ops = gimple_num_ops (stmt);
3109 enum tree_code subcode = gimple_assign_rhs_code (stmt);
3110 tree lhs = gimple_assign_lhs (stmt);
3111 tree new_rhs;
3112 /* First canonicalize operand order. This avoids building new
3113 trees if this is the only thing fold would later do. */
3114 if ((commutative_tree_code (subcode)
3115 || commutative_ternary_tree_code (subcode))
3116 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
3117 gimple_assign_rhs2 (stmt), false))
3119 tree tem = gimple_assign_rhs1 (stmt);
3120 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
3121 gimple_assign_set_rhs2 (stmt, tem);
3122 changed = true;
3124 new_rhs = fold_gimple_assign (gsi);
3125 if (new_rhs
3126 && !useless_type_conversion_p (TREE_TYPE (lhs),
3127 TREE_TYPE (new_rhs)))
3128 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3129 if (new_rhs
3130 && (!inplace
3131 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3133 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3134 changed = true;
3136 break;
3139 case GIMPLE_COND:
3140 changed |= fold_gimple_cond (stmt);
3141 break;
3143 case GIMPLE_CALL:
3144 changed |= gimple_fold_call (gsi, inplace);
3145 break;
3147 case GIMPLE_ASM:
3148 /* Fold *& in asm operands. */
3150 size_t noutputs;
3151 const char **oconstraints;
3152 const char *constraint;
3153 bool allows_mem, allows_reg;
3155 noutputs = gimple_asm_noutputs (stmt);
3156 oconstraints = XALLOCAVEC (const char *, noutputs);
3158 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3160 tree link = gimple_asm_output_op (stmt, i);
3161 tree op = TREE_VALUE (link);
3162 oconstraints[i]
3163 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3164 if (REFERENCE_CLASS_P (op)
3165 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3167 TREE_VALUE (link) = op;
3168 changed = true;
3171 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3173 tree link = gimple_asm_input_op (stmt, i);
3174 tree op = TREE_VALUE (link);
3175 constraint
3176 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3177 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3178 oconstraints, &allows_mem, &allows_reg);
3179 if (REFERENCE_CLASS_P (op)
3180 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3181 != NULL_TREE)
3183 TREE_VALUE (link) = op;
3184 changed = true;
3188 break;
3190 case GIMPLE_DEBUG:
3191 if (gimple_debug_bind_p (stmt))
3193 tree val = gimple_debug_bind_get_value (stmt);
3194 if (val
3195 && REFERENCE_CLASS_P (val))
3197 tree tem = maybe_fold_reference (val, false);
3198 if (tem)
3200 gimple_debug_bind_set_value (stmt, tem);
3201 changed = true;
3204 else if (val
3205 && TREE_CODE (val) == ADDR_EXPR)
3207 tree ref = TREE_OPERAND (val, 0);
3208 tree tem = maybe_fold_reference (ref, false);
3209 if (tem)
3211 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3212 gimple_debug_bind_set_value (stmt, tem);
3213 changed = true;
3217 break;
3219 default:;
3222 stmt = gsi_stmt (*gsi);
3224 /* Fold *& on the lhs. */
3225 if (gimple_has_lhs (stmt))
3227 tree lhs = gimple_get_lhs (stmt);
3228 if (lhs && REFERENCE_CLASS_P (lhs))
3230 tree new_lhs = maybe_fold_reference (lhs, true);
3231 if (new_lhs)
3233 gimple_set_lhs (stmt, new_lhs);
3234 changed = true;
3239 return changed;
3242 /* Valueization callback that ends up not following SSA edges. */
3244 tree
3245 no_follow_ssa_edges (tree)
3247 return NULL_TREE;
3250 /* Valueization callback that ends up following single-use SSA edges only. */
3252 tree
3253 follow_single_use_edges (tree val)
3255 if (TREE_CODE (val) == SSA_NAME
3256 && !has_single_use (val))
3257 return NULL_TREE;
3258 return val;
3261 /* Fold the statement pointed to by GSI. In some cases, this function may
3262 replace the whole statement with a new one. Returns true iff folding
3263 makes any changes.
3264 The statement pointed to by GSI should be in valid gimple form but may
3265 be in an unfolded state resulting from, for example, constant
3266 propagation, which can produce *&x = 0. */
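/* Illustrative example (added; not in the original source): constant
   propagation can leave
     *&x = 0;
   which fold_stmt simplifies to the direct store
     x = 0;  */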
3268 bool
3269 fold_stmt (gimple_stmt_iterator *gsi)
3271 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3274 bool
3275 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3277 return fold_stmt_1 (gsi, false, valueize);
3280 /* Perform the minimal folding on statement *GSI. Only operations like
3281 *&x created by constant propagation are handled. The statement cannot
3282 be replaced with a new one. Return true if the statement was
3283 changed, false otherwise.
3284 The statement *GSI should be in valid gimple form but may
3285 be in an unfolded state resulting from, for example, constant
3286 propagation, which can produce *&x = 0. */
3288 bool
3289 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3291 gimple stmt = gsi_stmt (*gsi);
3292 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3293 gcc_assert (gsi_stmt (*gsi) == stmt);
3294 return changed;
3297 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3298 if EXPR is null or we don't know how.
3299 If non-null, the result always has boolean type. */
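/* Illustrative examples (added; not in the original source): with
   INVERT true, X < Y canonicalizes to X >= Y and an SSA name B to
   B == 0; with INVERT false, B canonicalizes to B != 0.  */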
3301 static tree
3302 canonicalize_bool (tree expr, bool invert)
3304 if (!expr)
3305 return NULL_TREE;
3306 else if (invert)
3308 if (integer_nonzerop (expr))
3309 return boolean_false_node;
3310 else if (integer_zerop (expr))
3311 return boolean_true_node;
3312 else if (TREE_CODE (expr) == SSA_NAME)
3313 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3314 build_int_cst (TREE_TYPE (expr), 0));
3315 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3316 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3317 boolean_type_node,
3318 TREE_OPERAND (expr, 0),
3319 TREE_OPERAND (expr, 1));
3320 else
3321 return NULL_TREE;
3323 else
3325 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3326 return expr;
3327 if (integer_nonzerop (expr))
3328 return boolean_true_node;
3329 else if (integer_zerop (expr))
3330 return boolean_false_node;
3331 else if (TREE_CODE (expr) == SSA_NAME)
3332 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3333 build_int_cst (TREE_TYPE (expr), 0));
3334 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3335 return fold_build2 (TREE_CODE (expr),
3336 boolean_type_node,
3337 TREE_OPERAND (expr, 0),
3338 TREE_OPERAND (expr, 1));
3339 else
3340 return NULL_TREE;
3344 /* Check to see if a boolean expression EXPR is logically equivalent to the
3345 comparison (OP1 CODE OP2). Check for various identities involving
3346 SSA_NAMEs. */
3348 static bool
3349 same_bool_comparison_p (const_tree expr, enum tree_code code,
3350 const_tree op1, const_tree op2)
3352 gimple s;
3354 /* The obvious case. */
3355 if (TREE_CODE (expr) == code
3356 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3357 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3358 return true;
3360 /* Check for comparing (name, name != 0) and the case where expr
3361 is an SSA_NAME with a definition matching the comparison. */
3362 if (TREE_CODE (expr) == SSA_NAME
3363 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3365 if (operand_equal_p (expr, op1, 0))
3366 return ((code == NE_EXPR && integer_zerop (op2))
3367 || (code == EQ_EXPR && integer_nonzerop (op2)));
3368 s = SSA_NAME_DEF_STMT (expr);
3369 if (is_gimple_assign (s)
3370 && gimple_assign_rhs_code (s) == code
3371 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3372 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3373 return true;
3376 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3377 of name is a comparison, recurse. */
3378 if (TREE_CODE (op1) == SSA_NAME
3379 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3381 s = SSA_NAME_DEF_STMT (op1);
3382 if (is_gimple_assign (s)
3383 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3385 enum tree_code c = gimple_assign_rhs_code (s);
3386 if ((c == NE_EXPR && integer_zerop (op2))
3387 || (c == EQ_EXPR && integer_nonzerop (op2)))
3388 return same_bool_comparison_p (expr, c,
3389 gimple_assign_rhs1 (s),
3390 gimple_assign_rhs2 (s));
3391 if ((c == EQ_EXPR && integer_zerop (op2))
3392 || (c == NE_EXPR && integer_nonzerop (op2)))
3393 return same_bool_comparison_p (expr,
3394 invert_tree_comparison (c, false),
3395 gimple_assign_rhs1 (s),
3396 gimple_assign_rhs2 (s));
3399 return false;
3402 /* Check to see if two boolean expressions OP1 and OP2 are logically
3403 equivalent. */
3405 static bool
3406 same_bool_result_p (const_tree op1, const_tree op2)
3408 /* Simple cases first. */
3409 if (operand_equal_p (op1, op2, 0))
3410 return true;
3412 /* Check the cases where at least one of the operands is a comparison.
3413 These are a bit smarter than operand_equal_p in that they apply some
3414 identities on SSA_NAMEs. */
3415 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
3416 && same_bool_comparison_p (op1, TREE_CODE (op2),
3417 TREE_OPERAND (op2, 0),
3418 TREE_OPERAND (op2, 1)))
3419 return true;
3420 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
3421 && same_bool_comparison_p (op2, TREE_CODE (op1),
3422 TREE_OPERAND (op1, 0),
3423 TREE_OPERAND (op1, 1)))
3424 return true;
3426 /* Default case. */
3427 return false;
3430 /* Forward declarations for some mutually recursive functions. */
3432 static tree
3433 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3434 enum tree_code code2, tree op2a, tree op2b);
3435 static tree
3436 and_var_with_comparison (tree var, bool invert,
3437 enum tree_code code2, tree op2a, tree op2b);
3438 static tree
3439 and_var_with_comparison_1 (gimple stmt,
3440 enum tree_code code2, tree op2a, tree op2b);
3441 static tree
3442 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3443 enum tree_code code2, tree op2a, tree op2b);
3444 static tree
3445 or_var_with_comparison (tree var, bool invert,
3446 enum tree_code code2, tree op2a, tree op2b);
3447 static tree
3448 or_var_with_comparison_1 (gimple stmt,
3449 enum tree_code code2, tree op2a, tree op2b);
3451 /* Helper function for and_comparisons_1: try to simplify the AND of the
3452 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3453 If INVERT is true, invert the value of the VAR before doing the AND.
3454 Return NULL_TREE if we can't simplify this to a single expression. */
3456 static tree
3457 and_var_with_comparison (tree var, bool invert,
3458 enum tree_code code2, tree op2a, tree op2b)
3460 tree t;
3461 gimple stmt = SSA_NAME_DEF_STMT (var);
3463 /* We can only deal with variables whose definitions are assignments. */
3464 if (!is_gimple_assign (stmt))
3465 return NULL_TREE;
3467 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3468 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3469 Then we only have to consider the simpler non-inverted cases. */
3470 if (invert)
3471 t = or_var_with_comparison_1 (stmt,
3472 invert_tree_comparison (code2, false),
3473 op2a, op2b);
3474 else
3475 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3476 return canonicalize_bool (t, invert);
3479 /* Try to simplify the AND of the ssa variable defined by the assignment
3480 STMT with the comparison specified by (OP2A CODE2 OP2B).
3481 Return NULL_TREE if we can't simplify this to a single expression. */
3483 static tree
3484 and_var_with_comparison_1 (gimple stmt,
3485 enum tree_code code2, tree op2a, tree op2b)
3487 tree var = gimple_assign_lhs (stmt);
3488 tree true_test_var = NULL_TREE;
3489 tree false_test_var = NULL_TREE;
3490 enum tree_code innercode = gimple_assign_rhs_code (stmt);
3492 /* Check for identities like (var AND (var == 0)) => false. */
3493 if (TREE_CODE (op2a) == SSA_NAME
3494 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
3496 if ((code2 == NE_EXPR && integer_zerop (op2b))
3497 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
3499 true_test_var = op2a;
3500 if (var == true_test_var)
3501 return var;
3503 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
3504 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
3506 false_test_var = op2a;
3507 if (var == false_test_var)
3508 return boolean_false_node;
3512 /* If the definition is a comparison, recurse on it. */
3513 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
3515 tree t = and_comparisons_1 (innercode,
3516 gimple_assign_rhs1 (stmt),
3517 gimple_assign_rhs2 (stmt),
3518 code2,
3519 op2a,
3520 op2b);
3521 if (t)
3522 return t;
3525 /* If the definition is an AND or OR expression, we may be able to
3526 simplify by reassociating. */
3527 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
3528 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
3530 tree inner1 = gimple_assign_rhs1 (stmt);
3531 tree inner2 = gimple_assign_rhs2 (stmt);
3532 gimple s;
3533 tree t;
3534 tree partial = NULL_TREE;
3535 bool is_and = (innercode == BIT_AND_EXPR);
3537 /* Check for boolean identities that don't require recursive examination
3538 of inner1/inner2:
3539 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
3540 inner1 AND (inner1 OR inner2) => inner1
3541 !inner1 AND (inner1 AND inner2) => false
3542 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
3543 Likewise for similar cases involving inner2. */
3544 if (inner1 == true_test_var)
3545 return (is_and ? var : inner1);
3546 else if (inner2 == true_test_var)
3547 return (is_and ? var : inner2);
3548 else if (inner1 == false_test_var)
3549 return (is_and
3550 ? boolean_false_node
3551 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
3552 else if (inner2 == false_test_var)
3553 return (is_and
3554 ? boolean_false_node
3555 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
3557 /* Next, redistribute/reassociate the AND across the inner tests.
3558 Compute the first partial result, (inner1 AND (op2a code op2b)) */
3559 if (TREE_CODE (inner1) == SSA_NAME
3560 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
3561 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3562 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
3563 gimple_assign_rhs1 (s),
3564 gimple_assign_rhs2 (s),
3565 code2, op2a, op2b)))
3567 /* Handle the AND case, where we are reassociating:
3568 (inner1 AND inner2) AND (op2a code2 op2b)
3569 => (t AND inner2)
3570 If the partial result t is a constant, we win. Otherwise
3571 continue on to try reassociating with the other inner test. */
3572 if (is_and)
3574 if (integer_onep (t))
3575 return inner2;
3576 else if (integer_zerop (t))
3577 return boolean_false_node;
3580 /* Handle the OR case, where we are redistributing:
3581 (inner1 OR inner2) AND (op2a code2 op2b)
3582 => (t OR (inner2 AND (op2a code2 op2b))) */
3583 else if (integer_onep (t))
3584 return boolean_true_node;
3586 /* Save partial result for later. */
3587 partial = t;
3590 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
3591 if (TREE_CODE (inner2) == SSA_NAME
3592 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
3593 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3594 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
3595 gimple_assign_rhs1 (s),
3596 gimple_assign_rhs2 (s),
3597 code2, op2a, op2b)))
3599 /* Handle the AND case, where we are reassociating:
3600 (inner1 AND inner2) AND (op2a code2 op2b)
3601 => (inner1 AND t) */
3602 if (is_and)
3604 if (integer_onep (t))
3605 return inner1;
3606 else if (integer_zerop (t))
3607 return boolean_false_node;
3608 /* If both are the same, we can apply the identity
3609 (x AND x) == x. */
3610 else if (partial && same_bool_result_p (t, partial))
3611 return t;
3614 /* Handle the OR case, where we are redistributing:
3615 (inner1 OR inner2) AND (op2a code2 op2b)
3616 => (t OR (inner1 AND (op2a code2 op2b)))
3617 => (t OR partial) */
3618 else
3620 if (integer_onep (t))
3621 return boolean_true_node;
3622 else if (partial)
3624 /* We already got a simplification for the other
3625 operand to the redistributed OR expression. The
3626 interesting case is when at least one is false.
3627 Or, if both are the same, we can apply the identity
3628 (x OR x) == x. */
3629 if (integer_zerop (partial))
3630 return t;
3631 else if (integer_zerop (t))
3632 return partial;
3633 else if (same_bool_result_p (t, partial))
3634 return t;
3639 return NULL_TREE;
3642 /* Try to simplify the AND of two comparisons defined by
3643 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
3644 If this can be done without constructing an intermediate value,
3645 return the resulting tree; otherwise NULL_TREE is returned.
3646 This function is deliberately asymmetric as it recurses on SSA_DEFs
3647 in the first comparison but not the second. */
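/* Illustrative examples (added; not in the original source):
     (x <= 3) AND (x < 5)    =>  x <= 3
     (x == 2) AND (x > 5)    =>  false
     (x >= 4) AND (x <= 4)   =>  x == 4   */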
3649 static tree
3650 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3651 enum tree_code code2, tree op2a, tree op2b)
3653 tree truth_type = truth_type_for (TREE_TYPE (op1a));
3655 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
3656 if (operand_equal_p (op1a, op2a, 0)
3657 && operand_equal_p (op1b, op2b, 0))
3659 /* Result will be either NULL_TREE, or a combined comparison. */
3660 tree t = combine_comparisons (UNKNOWN_LOCATION,
3661 TRUTH_ANDIF_EXPR, code1, code2,
3662 truth_type, op1a, op1b);
3663 if (t)
3664 return t;
3667 /* Likewise the swapped case of the above. */
3668 if (operand_equal_p (op1a, op2b, 0)
3669 && operand_equal_p (op1b, op2a, 0))
3671 /* Result will be either NULL_TREE, or a combined comparison. */
3672 tree t = combine_comparisons (UNKNOWN_LOCATION,
3673 TRUTH_ANDIF_EXPR, code1,
3674 swap_tree_comparison (code2),
3675 truth_type, op1a, op1b);
3676 if (t)
3677 return t;
3680 /* If both comparisons are of the same value against constants, we might
3681 be able to merge them. */
3682 if (operand_equal_p (op1a, op2a, 0)
3683 && TREE_CODE (op1b) == INTEGER_CST
3684 && TREE_CODE (op2b) == INTEGER_CST)
3686 int cmp = tree_int_cst_compare (op1b, op2b);
3688 /* If we have (op1a == op1b), we should either be able to
3689 return that or FALSE, depending on whether the constant op1b
3690 also satisfies the other comparison against op2b. */
3691 if (code1 == EQ_EXPR)
3693 bool done = true;
3694 bool val;
3695 switch (code2)
3697 case EQ_EXPR: val = (cmp == 0); break;
3698 case NE_EXPR: val = (cmp != 0); break;
3699 case LT_EXPR: val = (cmp < 0); break;
3700 case GT_EXPR: val = (cmp > 0); break;
3701 case LE_EXPR: val = (cmp <= 0); break;
3702 case GE_EXPR: val = (cmp >= 0); break;
3703 default: done = false;
3705 if (done)
3707 if (val)
3708 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3709 else
3710 return boolean_false_node;
3713 /* Likewise if the second comparison is an == comparison. */
3714 else if (code2 == EQ_EXPR)
3716 bool done = true;
3717 bool val;
3718 switch (code1)
3720 case EQ_EXPR: val = (cmp == 0); break;
3721 case NE_EXPR: val = (cmp != 0); break;
3722 case LT_EXPR: val = (cmp > 0); break;
3723 case GT_EXPR: val = (cmp < 0); break;
3724 case LE_EXPR: val = (cmp >= 0); break;
3725 case GE_EXPR: val = (cmp <= 0); break;
3726 default: done = false;
3728 if (done)
3730 if (val)
3731 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3732 else
3733 return boolean_false_node;
3737 /* Same business with inequality tests. */
3738 else if (code1 == NE_EXPR)
3740 bool val;
3741 switch (code2)
3743 case EQ_EXPR: val = (cmp != 0); break;
3744 case NE_EXPR: val = (cmp == 0); break;
3745 case LT_EXPR: val = (cmp >= 0); break;
3746 case GT_EXPR: val = (cmp <= 0); break;
3747 case LE_EXPR: val = (cmp > 0); break;
3748 case GE_EXPR: val = (cmp < 0); break;
3749 default:
3750 val = false;
3752 if (val)
3753 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3755 else if (code2 == NE_EXPR)
3757 bool val;
3758 switch (code1)
3760 case EQ_EXPR: val = (cmp == 0); break;
3761 case NE_EXPR: val = (cmp != 0); break;
3762 case LT_EXPR: val = (cmp <= 0); break;
3763 case GT_EXPR: val = (cmp >= 0); break;
3764 case LE_EXPR: val = (cmp < 0); break;
3765 case GE_EXPR: val = (cmp > 0); break;
3766 default:
3767 val = false;
3769 if (val)
3770 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3773 /* Choose the more restrictive of two < or <= comparisons. */
3774 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
3775 && (code2 == LT_EXPR || code2 == LE_EXPR))
3777 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
3778 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3779 else
3780 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3783 /* Likewise choose the more restrictive of two > or >= comparisons. */
3784 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
3785 && (code2 == GT_EXPR || code2 == GE_EXPR))
3787 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
3788 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3789 else
3790 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3793 /* Check for singleton ranges. */
3794 else if (cmp == 0
3795 && ((code1 == LE_EXPR && code2 == GE_EXPR)
3796 || (code1 == GE_EXPR && code2 == LE_EXPR)))
3797 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
3799 /* Check for disjoint ranges. */
3800 else if (cmp <= 0
3801 && (code1 == LT_EXPR || code1 == LE_EXPR)
3802 && (code2 == GT_EXPR || code2 == GE_EXPR))
3803 return boolean_false_node;
3804 else if (cmp >= 0
3805 && (code1 == GT_EXPR || code1 == GE_EXPR)
3806 && (code2 == LT_EXPR || code2 == LE_EXPR))
3807 return boolean_false_node;
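/* Illustrative examples of the constant cases handled above (assuming an
   integer X; these are sketches, not exercised tests):
     (x == 5) AND (x < 3)  => false, since 5 < 3 fails;
     (x < 5)  AND (x <= 7) => x < 5, the more restrictive bound;
     (x <= 5) AND (x >= 5) => x == 5, a singleton range;
     (x < 3)  AND (x > 7)  => false, the ranges are disjoint.  */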
3810 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
3811 NAME's definition is a truth value. See if there are any simplifications
3812 that can be done against NAME's definition. */
3813 if (TREE_CODE (op1a) == SSA_NAME
3814 && (code1 == NE_EXPR || code1 == EQ_EXPR)
3815 && (integer_zerop (op1b) || integer_onep (op1b)))
3817 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
3818 || (code1 == NE_EXPR && integer_onep (op1b)));
3819 gimple stmt = SSA_NAME_DEF_STMT (op1a);
3820 switch (gimple_code (stmt))
3822 case GIMPLE_ASSIGN:
3823 /* Try to simplify by copy-propagating the definition. */
3824 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
3826 case GIMPLE_PHI:
3827 /* If every argument to the PHI produces the same result when
3828 ANDed with the second comparison, we win.
3829 Do not do this unless the type is bool since we need a bool
3830 result here anyway. */
3831 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
3833 tree result = NULL_TREE;
3834 unsigned i;
3835 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3837 tree arg = gimple_phi_arg_def (stmt, i);
3839 /* If this PHI has itself as an argument, ignore it.
3840 If all the other args produce the same result,
3841 we're still OK. */
3842 if (arg == gimple_phi_result (stmt))
3843 continue;
3844 else if (TREE_CODE (arg) == INTEGER_CST)
3846 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
3848 if (!result)
3849 result = boolean_false_node;
3850 else if (!integer_zerop (result))
3851 return NULL_TREE;
3853 else if (!result)
3854 result = fold_build2 (code2, boolean_type_node,
3855 op2a, op2b);
3856 else if (!same_bool_comparison_p (result,
3857 code2, op2a, op2b))
3858 return NULL_TREE;
3860 else if (TREE_CODE (arg) == SSA_NAME
3861 && !SSA_NAME_IS_DEFAULT_DEF (arg))
3863 tree temp;
3864 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
3865 /* In simple cases we can look through PHI nodes,
3866 but we have to be careful with loops.
3867 See PR49073. */
3868 if (! dom_info_available_p (CDI_DOMINATORS)
3869 || gimple_bb (def_stmt) == gimple_bb (stmt)
3870 || dominated_by_p (CDI_DOMINATORS,
3871 gimple_bb (def_stmt),
3872 gimple_bb (stmt)))
3873 return NULL_TREE;
3874 temp = and_var_with_comparison (arg, invert, code2,
3875 op2a, op2b);
3876 if (!temp)
3877 return NULL_TREE;
3878 else if (!result)
3879 result = temp;
3880 else if (!same_bool_result_p (result, temp))
3881 return NULL_TREE;
3883 else
3884 return NULL_TREE;
3886 return result;
3889 default:
3890 break;
3893 return NULL_TREE;
3896 /* Try to simplify the AND of two comparisons, specified by
3897 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
3898 If this can be simplified to a single expression (without requiring
3899 introducing more SSA variables to hold intermediate values),
3900 return the resulting tree. Otherwise return NULL_TREE.
3901 If the result expression is non-null, it has boolean type. */
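/* For instance (an illustrative sketch; FIVE and SEVEN stand for
   INTEGER_CSTs of X's type):
     maybe_fold_and_comparisons (LT_EXPR, x, five, LT_EXPR, x, seven)
   yields the tree for "x < 5", while operand pairs that share nothing
   simply yield NULL_TREE.  */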
3903 tree
3904 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
3905 enum tree_code code2, tree op2a, tree op2b)
3907 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
3908 if (t)
3909 return t;
3910 else
3911 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
3914 /* Helper function for or_comparisons_1: try to simplify the OR of the
3915 SSA variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3916 If INVERT is true, invert the value of VAR before doing the OR.
3917 Return NULL_TREE if we can't simplify this to a single expression. */
3919 static tree
3920 or_var_with_comparison (tree var, bool invert,
3921 enum tree_code code2, tree op2a, tree op2b)
3923 tree t;
3924 gimple stmt = SSA_NAME_DEF_STMT (var);
3926 /* We can only deal with variables whose definitions are assignments. */
3927 if (!is_gimple_assign (stmt))
3928 return NULL_TREE;
3930 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3931 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
3932 Then we only have to consider the simpler non-inverted cases. */
3933 if (invert)
3934 t = and_var_with_comparison_1 (stmt,
3935 invert_tree_comparison (code2, false),
3936 op2a, op2b);
3937 else
3938 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
3939 return canonicalize_bool (t, invert);
3942 /* Try to simplify the OR of the SSA variable defined by the assignment
3943 STMT with the comparison specified by (OP2A CODE2 OP2B).
3944 Return NULL_TREE if we can't simplify this to a single expression. */
3946 static tree
3947 or_var_with_comparison_1 (gimple stmt,
3948 enum tree_code code2, tree op2a, tree op2b)
3950 tree var = gimple_assign_lhs (stmt);
3951 tree true_test_var = NULL_TREE;
3952 tree false_test_var = NULL_TREE;
3953 enum tree_code innercode = gimple_assign_rhs_code (stmt);
3955 /* Check for identities like (var OR (var != 0)) => true. */
3956 if (TREE_CODE (op2a) == SSA_NAME
3957 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
3959 if ((code2 == NE_EXPR && integer_zerop (op2b))
3960 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
3962 true_test_var = op2a;
3963 if (var == true_test_var)
3964 return var;
3966 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
3967 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
3969 false_test_var = op2a;
3970 if (var == false_test_var)
3971 return boolean_true_node;
3975 /* If the definition is a comparison, recurse on it. */
3976 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
3978 tree t = or_comparisons_1 (innercode,
3979 gimple_assign_rhs1 (stmt),
3980 gimple_assign_rhs2 (stmt),
3981 code2,
3982 op2a,
3983 op2b);
3984 if (t)
3985 return t;
3988 /* If the definition is an AND or OR expression, we may be able to
3989 simplify by reassociating. */
3990 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
3991 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
3993 tree inner1 = gimple_assign_rhs1 (stmt);
3994 tree inner2 = gimple_assign_rhs2 (stmt);
3995 gimple s;
3996 tree t;
3997 tree partial = NULL_TREE;
3998 bool is_or = (innercode == BIT_IOR_EXPR);
4000 /* Check for boolean identities that don't require recursive examination
4001 of inner1/inner2:
4002 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4003 inner1 OR (inner1 AND inner2) => inner1
4004 !inner1 OR (inner1 OR inner2) => true
4005 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4007 if (inner1 == true_test_var)
4008 return (is_or ? var : inner1);
4009 else if (inner2 == true_test_var)
4010 return (is_or ? var : inner2);
4011 else if (inner1 == false_test_var)
4012 return (is_or
4013 ? boolean_true_node
4014 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4015 else if (inner2 == false_test_var)
4016 return (is_or
4017 ? boolean_true_node
4018 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4020 /* Next, redistribute/reassociate the OR across the inner tests.
4021 Compute the first partial result, (inner1 OR (op2a code2 op2b)). */
4022 if (TREE_CODE (inner1) == SSA_NAME
4023 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4024 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4025 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4026 gimple_assign_rhs1 (s),
4027 gimple_assign_rhs2 (s),
4028 code2, op2a, op2b)))
4030 /* Handle the OR case, where we are reassociating:
4031 (inner1 OR inner2) OR (op2a code2 op2b)
4032 => (t OR inner2)
4033 If the partial result t is a constant, we win. Otherwise
4034 continue on to try reassociating with the other inner test. */
4035 if (is_or)
4037 if (integer_onep (t))
4038 return boolean_true_node;
4039 else if (integer_zerop (t))
4040 return inner2;
4043 /* Handle the AND case, where we are redistributing:
4044 (inner1 AND inner2) OR (op2a code2 op2b)
4045 => (t AND (inner2 OR (op2a code2 op2b))) */
4046 else if (integer_zerop (t))
4047 return boolean_false_node;
4049 /* Save partial result for later. */
4050 partial = t;
4053 /* Compute the second partial result, (inner2 OR (op2a code2 op2b)). */
4054 if (TREE_CODE (inner2) == SSA_NAME
4055 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4056 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4057 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4058 gimple_assign_rhs1 (s),
4059 gimple_assign_rhs2 (s),
4060 code2, op2a, op2b)))
4062 /* Handle the OR case, where we are reassociating:
4063 (inner1 OR inner2) OR (op2a code2 op2b)
4064 => (inner1 OR t)
4065 => (t OR partial) */
4066 if (is_or)
4068 if (integer_zerop (t))
4069 return inner1;
4070 else if (integer_onep (t))
4071 return boolean_true_node;
4072 /* If both are the same, we can apply the identity
4073 (x OR x) == x. */
4074 else if (partial && same_bool_result_p (t, partial))
4075 return t;
4078 /* Handle the AND case, where we are redistributing:
4079 (inner1 AND inner2) OR (op2a code2 op2b)
4080 => (t AND (inner1 OR (op2a code2 op2b)))
4081 => (t AND partial) */
4082 else
4084 if (integer_zerop (t))
4085 return boolean_false_node;
4086 else if (partial)
4088 /* We already got a simplification for the other
4089 operand to the redistributed AND expression. The
4090 interesting case is when at least one is true.
4091 Or, if both are the same, we can apply the identity
4092 (x AND x) == x. */
4093 if (integer_onep (partial))
4094 return t;
4095 else if (integer_onep (t))
4096 return partial;
4097 else if (same_bool_result_p (t, partial))
4098 return t;
4103 return NULL_TREE;
4106 /* Try to simplify the OR of two comparisons defined by
4107 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4108 If this can be done without constructing an intermediate value,
4109 return the resulting tree; otherwise NULL_TREE is returned.
4110 This function is deliberately asymmetric as it recurses on SSA_DEFs
4111 in the first comparison but not the second. */
4113 static tree
4114 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4115 enum tree_code code2, tree op2a, tree op2b)
4117 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4119 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4120 if (operand_equal_p (op1a, op2a, 0)
4121 && operand_equal_p (op1b, op2b, 0))
4123 /* Result will be either NULL_TREE, or a combined comparison. */
4124 tree t = combine_comparisons (UNKNOWN_LOCATION,
4125 TRUTH_ORIF_EXPR, code1, code2,
4126 truth_type, op1a, op1b);
4127 if (t)
4128 return t;
4131 /* Likewise the swapped case of the above. */
4132 if (operand_equal_p (op1a, op2b, 0)
4133 && operand_equal_p (op1b, op2a, 0))
4135 /* Result will be either NULL_TREE, or a combined comparison. */
4136 tree t = combine_comparisons (UNKNOWN_LOCATION,
4137 TRUTH_ORIF_EXPR, code1,
4138 swap_tree_comparison (code2),
4139 truth_type, op1a, op1b);
4140 if (t)
4141 return t;
4144 /* If both comparisons test the same value against constants, we might
4145 be able to merge them. */
4146 if (operand_equal_p (op1a, op2a, 0)
4147 && TREE_CODE (op1b) == INTEGER_CST
4148 && TREE_CODE (op2b) == INTEGER_CST)
4150 int cmp = tree_int_cst_compare (op1b, op2b);
4152 /* If we have (op1a != op1b), we should either be able to
4153 return that or TRUE, depending on whether the constant op1b
4154 also satisfies the other comparison against op2b. */
4155 if (code1 == NE_EXPR)
4157 bool done = true;
4158 bool val;
4159 switch (code2)
4161 case EQ_EXPR: val = (cmp == 0); break;
4162 case NE_EXPR: val = (cmp != 0); break;
4163 case LT_EXPR: val = (cmp < 0); break;
4164 case GT_EXPR: val = (cmp > 0); break;
4165 case LE_EXPR: val = (cmp <= 0); break;
4166 case GE_EXPR: val = (cmp >= 0); break;
4167 default: done = false;
4169 if (done)
4171 if (val)
4172 return boolean_true_node;
4173 else
4174 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4177 /* Likewise if the second comparison is a != comparison. */
4178 else if (code2 == NE_EXPR)
4180 bool done = true;
4181 bool val;
4182 switch (code1)
4184 case EQ_EXPR: val = (cmp == 0); break;
4185 case NE_EXPR: val = (cmp != 0); break;
4186 case LT_EXPR: val = (cmp > 0); break;
4187 case GT_EXPR: val = (cmp < 0); break;
4188 case LE_EXPR: val = (cmp >= 0); break;
4189 case GE_EXPR: val = (cmp <= 0); break;
4190 default: done = false;
4192 if (done)
4194 if (val)
4195 return boolean_true_node;
4196 else
4197 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4201 /* See if an equality test is redundant with the other comparison. */
4202 else if (code1 == EQ_EXPR)
4204 bool val;
4205 switch (code2)
4207 case EQ_EXPR: val = (cmp == 0); break;
4208 case NE_EXPR: val = (cmp != 0); break;
4209 case LT_EXPR: val = (cmp < 0); break;
4210 case GT_EXPR: val = (cmp > 0); break;
4211 case LE_EXPR: val = (cmp <= 0); break;
4212 case GE_EXPR: val = (cmp >= 0); break;
4213 default:
4214 val = false;
4216 if (val)
4217 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4219 else if (code2 == EQ_EXPR)
4221 bool val;
4222 switch (code1)
4224 case EQ_EXPR: val = (cmp == 0); break;
4225 case NE_EXPR: val = (cmp != 0); break;
4226 case LT_EXPR: val = (cmp > 0); break;
4227 case GT_EXPR: val = (cmp < 0); break;
4228 case LE_EXPR: val = (cmp >= 0); break;
4229 case GE_EXPR: val = (cmp <= 0); break;
4230 default:
4231 val = false;
4233 if (val)
4234 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4237 /* Choose the less restrictive of two < or <= comparisons. */
4238 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4239 && (code2 == LT_EXPR || code2 == LE_EXPR))
4241 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4242 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4243 else
4244 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4247 /* Likewise choose the less restrictive of two > or >= comparisons. */
4248 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4249 && (code2 == GT_EXPR || code2 == GE_EXPR))
4251 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4252 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4253 else
4254 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4257 /* Check for singleton ranges. */
4258 else if (cmp == 0
4259 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4260 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4261 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4263 /* Check for less/greater pairs that don't restrict the range at all. */
4264 else if (cmp >= 0
4265 && (code1 == LT_EXPR || code1 == LE_EXPR)
4266 && (code2 == GT_EXPR || code2 == GE_EXPR))
4267 return boolean_true_node;
4268 else if (cmp <= 0
4269 && (code1 == GT_EXPR || code1 == GE_EXPR)
4270 && (code2 == LT_EXPR || code2 == LE_EXPR))
4271 return boolean_true_node;
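/* Illustrative examples of the constant cases handled above:
     (x != 5) OR (x < 7)  => true, since 5 < 7 holds;
     (x < 5)  OR (x <= 7) => x <= 7, the less restrictive bound;
     (x < 5)  OR (x > 5)  => x != 5, a singleton gap;
     (x < 7)  OR (x > 3)  => true, the range is unrestricted.  */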
4274 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4275 NAME's definition is a truth value. See if there are any simplifications
4276 that can be done against NAME's definition. */
4277 if (TREE_CODE (op1a) == SSA_NAME
4278 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4279 && (integer_zerop (op1b) || integer_onep (op1b)))
4281 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4282 || (code1 == NE_EXPR && integer_onep (op1b)));
4283 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4284 switch (gimple_code (stmt))
4286 case GIMPLE_ASSIGN:
4287 /* Try to simplify by copy-propagating the definition. */
4288 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4290 case GIMPLE_PHI:
4291 /* If every argument to the PHI produces the same result when
4292 ORed with the second comparison, we win.
4293 Do not do this unless the type is bool since we need a bool
4294 result here anyway. */
4295 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4297 tree result = NULL_TREE;
4298 unsigned i;
4299 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4301 tree arg = gimple_phi_arg_def (stmt, i);
4303 /* If this PHI has itself as an argument, ignore it.
4304 If all the other args produce the same result,
4305 we're still OK. */
4306 if (arg == gimple_phi_result (stmt))
4307 continue;
4308 else if (TREE_CODE (arg) == INTEGER_CST)
4310 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4312 if (!result)
4313 result = boolean_true_node;
4314 else if (!integer_onep (result))
4315 return NULL_TREE;
4317 else if (!result)
4318 result = fold_build2 (code2, boolean_type_node,
4319 op2a, op2b);
4320 else if (!same_bool_comparison_p (result,
4321 code2, op2a, op2b))
4322 return NULL_TREE;
4324 else if (TREE_CODE (arg) == SSA_NAME
4325 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4327 tree temp;
4328 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4329 /* In simple cases we can look through PHI nodes,
4330 but we have to be careful with loops.
4331 See PR49073. */
4332 if (! dom_info_available_p (CDI_DOMINATORS)
4333 || gimple_bb (def_stmt) == gimple_bb (stmt)
4334 || dominated_by_p (CDI_DOMINATORS,
4335 gimple_bb (def_stmt),
4336 gimple_bb (stmt)))
4337 return NULL_TREE;
4338 temp = or_var_with_comparison (arg, invert, code2,
4339 op2a, op2b);
4340 if (!temp)
4341 return NULL_TREE;
4342 else if (!result)
4343 result = temp;
4344 else if (!same_bool_result_p (result, temp))
4345 return NULL_TREE;
4347 else
4348 return NULL_TREE;
4350 return result;
4353 default:
4354 break;
4357 return NULL_TREE;
4360 /* Try to simplify the OR of two comparisons, specified by
4361 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4362 If this can be simplified to a single expression (without requiring
4363 introducing more SSA variables to hold intermediate values),
4364 return the resulting tree. Otherwise return NULL_TREE.
4365 If the result expression is non-null, it has boolean type. */
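/* For instance (an illustrative sketch; THREE and FIVE stand for
   INTEGER_CSTs of X's type):
     maybe_fold_or_comparisons (GT_EXPR, x, five, GT_EXPR, x, three)
   yields the tree for "x > 3"; if neither operand order simplifies,
   NULL_TREE results.  */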
4367 tree
4368 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4369 enum tree_code code2, tree op2a, tree op2b)
4371 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4372 if (t)
4373 return t;
4374 else
4375 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4379 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4381 Either NULL_TREE, a simplified but non-constant expression, or a
4382 constant is returned.
4384 ??? This should go into a gimple-fold-inline.h file to be eventually
4385 privatized with the single valueize function used in the various TUs
4386 to avoid the indirect function call overhead. */
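/* A minimal sketch of a VALUEIZE callback (hypothetical; real callers
   such as CCP or SCCVN supply one mapping SSA names to lattice values):

     static tree
     identity_valueize (tree name)
     {
       return name;
     }

   With this, folding relies only on constants already present in the
   statement operands, e.g.
     tree val = gimple_fold_stmt_to_constant_1 (stmt, identity_valueize);  */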
4388 tree
4389 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
4391 code_helper rcode;
4392 tree ops[3] = {};
4393 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4394 edges if there are intermediate VARYING defs. For this reason
4395 do not follow SSA edges here even though SCCVN can technically
4396 just deal fine with that. */
4397 if (gimple_simplify (stmt, &rcode, ops, NULL, no_follow_ssa_edges)
4398 && rcode.is_tree_code ()
4399 && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
4400 || ((tree_code) rcode) == ADDR_EXPR)
4401 && is_gimple_val (ops[0]))
4403 tree res = ops[0];
4404 if (dump_file && dump_flags & TDF_DETAILS)
4406 fprintf (dump_file, "Match-and-simplified ");
4407 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4408 fprintf (dump_file, " to ");
4409 print_generic_expr (dump_file, res, 0);
4410 fprintf (dump_file, "\n");
4412 return res;
4415 location_t loc = gimple_location (stmt);
4416 switch (gimple_code (stmt))
4418 case GIMPLE_ASSIGN:
4420 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4422 switch (get_gimple_rhs_class (subcode))
4424 case GIMPLE_SINGLE_RHS:
4426 tree rhs = gimple_assign_rhs1 (stmt);
4427 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4429 if (TREE_CODE (rhs) == SSA_NAME)
4431 /* If the RHS is an SSA_NAME, return its known constant value,
4432 if any. */
4433 return (*valueize) (rhs);
4435 /* Handle propagating invariant addresses into address
4436 operations. */
4437 else if (TREE_CODE (rhs) == ADDR_EXPR
4438 && !is_gimple_min_invariant (rhs))
4440 HOST_WIDE_INT offset = 0;
4441 tree base;
4442 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4443 &offset,
4444 valueize);
4445 if (base
4446 && (CONSTANT_CLASS_P (base)
4447 || decl_address_invariant_p (base)))
4448 return build_invariant_address (TREE_TYPE (rhs),
4449 base, offset);
4451 else if (TREE_CODE (rhs) == CONSTRUCTOR
4452 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4453 && (CONSTRUCTOR_NELTS (rhs)
4454 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4456 unsigned i;
4457 tree val, *vec;
4459 vec = XALLOCAVEC (tree,
4460 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
4461 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4463 val = (*valueize) (val);
4464 if (TREE_CODE (val) == INTEGER_CST
4465 || TREE_CODE (val) == REAL_CST
4466 || TREE_CODE (val) == FIXED_CST)
4467 vec[i] = val;
4468 else
4469 return NULL_TREE;
4472 return build_vector (TREE_TYPE (rhs), vec);
4474 if (subcode == OBJ_TYPE_REF)
4476 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
4477 /* If callee is constant, we can fold away the wrapper. */
4478 if (is_gimple_min_invariant (val))
4479 return val;
4482 if (kind == tcc_reference)
4484 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
4485 || TREE_CODE (rhs) == REALPART_EXPR
4486 || TREE_CODE (rhs) == IMAGPART_EXPR)
4487 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4489 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4490 return fold_unary_loc (EXPR_LOCATION (rhs),
4491 TREE_CODE (rhs),
4492 TREE_TYPE (rhs), val);
4494 else if (TREE_CODE (rhs) == BIT_FIELD_REF
4495 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4497 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4498 return fold_ternary_loc (EXPR_LOCATION (rhs),
4499 TREE_CODE (rhs),
4500 TREE_TYPE (rhs), val,
4501 TREE_OPERAND (rhs, 1),
4502 TREE_OPERAND (rhs, 2));
4504 else if (TREE_CODE (rhs) == MEM_REF
4505 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4507 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4508 if (TREE_CODE (val) == ADDR_EXPR
4509 && is_gimple_min_invariant (val))
4511 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
4512 unshare_expr (val),
4513 TREE_OPERAND (rhs, 1));
4514 if (tem)
4515 rhs = tem;
4518 return fold_const_aggregate_ref_1 (rhs, valueize);
4520 else if (kind == tcc_declaration)
4521 return get_symbol_constant_value (rhs);
4522 return rhs;
4525 case GIMPLE_UNARY_RHS:
4527 /* Handle unary operators that can appear in GIMPLE form.
4528 Note that we know the single operand must be a constant,
4529 so this should almost always return a simplified RHS. */
4530 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4532 return
4533 fold_unary_ignore_overflow_loc (loc, subcode,
4534 gimple_expr_type (stmt), op0);
4537 case GIMPLE_BINARY_RHS:
4539 /* Handle binary operators that can appear in GIMPLE form. */
4540 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4541 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4543 /* Translate &x + CST into an invariant form suitable for
4544 further propagation. */
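/* E.g. "tem = &a p+ 4" becomes roughly &MEM[&a + 4B] (an illustrative
   rendering of the ADDR_EXPR of a MEM_REF built below).  */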
4545 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
4546 && TREE_CODE (op0) == ADDR_EXPR
4547 && TREE_CODE (op1) == INTEGER_CST)
4549 tree off = fold_convert (ptr_type_node, op1);
4550 return build_fold_addr_expr_loc
4551 (loc,
4552 fold_build2 (MEM_REF,
4553 TREE_TYPE (TREE_TYPE (op0)),
4554 unshare_expr (op0), off));
4557 return fold_binary_loc (loc, subcode,
4558 gimple_expr_type (stmt), op0, op1);
4561 case GIMPLE_TERNARY_RHS:
4563 /* Handle ternary operators that can appear in GIMPLE form. */
4564 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4565 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4566 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
4568 /* Fold embedded expressions in ternary codes. */
4569 if ((subcode == COND_EXPR
4570 || subcode == VEC_COND_EXPR)
4571 && COMPARISON_CLASS_P (op0))
4573 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
4574 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
4575 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
4576 TREE_TYPE (op0), op00, op01);
4577 if (tem)
4578 op0 = tem;
4581 return fold_ternary_loc (loc, subcode,
4582 gimple_expr_type (stmt), op0, op1, op2);
4585 default:
4586 gcc_unreachable ();
4590 case GIMPLE_CALL:
4592 tree fn;
4594 if (gimple_call_internal_p (stmt))
4596 enum tree_code subcode = ERROR_MARK;
4597 switch (gimple_call_internal_fn (stmt))
4599 case IFN_UBSAN_CHECK_ADD:
4600 subcode = PLUS_EXPR;
4601 break;
4602 case IFN_UBSAN_CHECK_SUB:
4603 subcode = MINUS_EXPR;
4604 break;
4605 case IFN_UBSAN_CHECK_MUL:
4606 subcode = MULT_EXPR;
4607 break;
4608 default:
4609 return NULL_TREE;
4611 tree arg0 = gimple_call_arg (stmt, 0);
4612 tree arg1 = gimple_call_arg (stmt, 1);
4613 tree op0 = (*valueize) (arg0);
4614 tree op1 = (*valueize) (arg1);
4616 if (TREE_CODE (op0) != INTEGER_CST
4617 || TREE_CODE (op1) != INTEGER_CST)
4619 switch (subcode)
4621 case MULT_EXPR:
4622 /* x * 0 = 0 * x = 0 without overflow. */
4623 if (integer_zerop (op0) || integer_zerop (op1))
4624 return build_zero_cst (TREE_TYPE (arg0));
4625 break;
4626 case MINUS_EXPR:
4627 /* y - y = 0 without overflow. */
4628 if (operand_equal_p (op0, op1, 0))
4629 return build_zero_cst (TREE_TYPE (arg0));
4630 break;
4631 default:
4632 break;
4635 tree res
4636 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
4637 if (res
4638 && TREE_CODE (res) == INTEGER_CST
4639 && !TREE_OVERFLOW (res))
4640 return res;
4641 return NULL_TREE;
4644 fn = (*valueize) (gimple_call_fn (stmt));
4645 if (TREE_CODE (fn) == ADDR_EXPR
4646 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
4647 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
4648 && gimple_builtin_call_types_compatible_p (stmt,
4649 TREE_OPERAND (fn, 0)))
4651 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
4652 tree call, retval;
4653 unsigned i;
4654 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4655 args[i] = (*valueize) (gimple_call_arg (stmt, i));
4656 call = build_call_array_loc (loc,
4657 gimple_call_return_type (stmt),
4658 fn, gimple_call_num_args (stmt), args);
4659 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
4660 if (retval)
4662 /* fold_call_expr wraps the result inside a NOP_EXPR. */
4663 STRIP_NOPS (retval);
4664 retval = fold_convert (gimple_call_return_type (stmt), retval);
4666 return retval;
4668 return NULL_TREE;
4671 default:
4672 return NULL_TREE;
4676 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4677 Returns NULL_TREE if folding to a constant is not possible, otherwise
4678 returns a constant according to is_gimple_min_invariant. */
4680 tree
4681 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
4683 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
4684 if (res && is_gimple_min_invariant (res))
4685 return res;
4686 return NULL_TREE;
4690 /* The following set of functions is supposed to fold references using
4691 their constant initializers. */
4693 static tree fold_ctor_reference (tree type, tree ctor,
4694 unsigned HOST_WIDE_INT offset,
4695 unsigned HOST_WIDE_INT size, tree);
4697 /* See if we can find the constructor defining the value of BASE.
4698 When we know the constructor at a constant offset (such as
4699 BASE being array[40], where we do know the constructor of array),
4700 BIT_OFFSET is adjusted accordingly.
4702 As a special case, return error_mark_node when the constructor
4703 is not explicitly available but is known to be zero,
4704 as for 'static const int a;'. */
4705 static tree
4706 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
4707 tree (*valueize)(tree))
4709 HOST_WIDE_INT bit_offset2, size, max_size;
4710 if (TREE_CODE (base) == MEM_REF)
4712 if (!integer_zerop (TREE_OPERAND (base, 1)))
4714 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
4715 return NULL_TREE;
4716 *bit_offset += (mem_ref_offset (base).to_short_addr ()
4717 * BITS_PER_UNIT);
4720 if (valueize
4721 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
4722 base = valueize (TREE_OPERAND (base, 0));
4723 if (!base || TREE_CODE (base) != ADDR_EXPR)
4724 return NULL_TREE;
4725 base = TREE_OPERAND (base, 0);
4728 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
4729 DECL_INITIAL. If BASE is a nested reference into another
4730 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
4731 the inner reference. */
4732 switch (TREE_CODE (base))
4734 case VAR_DECL:
4735 case CONST_DECL:
4737 tree init = ctor_for_folding (base);
4739 /* Our semantics are the exact opposite of ctor_for_folding's:
4740 NULL means unknown, while error_mark_node means 0. */
4741 if (init == error_mark_node)
4742 return NULL_TREE;
4743 if (!init)
4744 return error_mark_node;
4745 return init;
4748 case ARRAY_REF:
4749 case COMPONENT_REF:
4750 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
4751 if (max_size == -1 || size != max_size)
4752 return NULL_TREE;
4753 *bit_offset += bit_offset2;
4754 return get_base_constructor (base, bit_offset, valueize);
4756 case STRING_CST:
4757 case CONSTRUCTOR:
4758 return base;
4760 default:
4761 return NULL_TREE;
4765 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
4766 SIZE to the memory at bit OFFSET. */
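/* Illustration (a sketch assuming 32-bit int): given
     static const int a[4] = {1, 2, 3, 4};
   a read of type int with OFFSET 64 and SIZE 32 computes access_index 2
   and folds to the constant 3.  */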
4768 static tree
4769 fold_array_ctor_reference (tree type, tree ctor,
4770 unsigned HOST_WIDE_INT offset,
4771 unsigned HOST_WIDE_INT size,
4772 tree from_decl)
4774 unsigned HOST_WIDE_INT cnt;
4775 tree cfield, cval;
4776 offset_int low_bound;
4777 offset_int elt_size;
4778 offset_int index, max_index;
4779 offset_int access_index;
4780 tree domain_type = NULL_TREE, index_type = NULL_TREE;
4781 HOST_WIDE_INT inner_offset;
4783 /* Compute low bound and elt size. */
4784 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
4785 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
4786 if (domain_type && TYPE_MIN_VALUE (domain_type))
4788 /* Static constructors for variably sized objects make no sense. */
4789 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
4790 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
4791 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
4793 else
4794 low_bound = 0;
4795 /* Static constructors for variably sized objects make no sense. */
4796 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
4797 == INTEGER_CST);
4798 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
4800 /* We can only handle constant-sized accesses that are known not to
4801 be larger than the size of an array element. */
4802 if (!TYPE_SIZE_UNIT (type)
4803 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
4804 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
4805 || elt_size == 0)
4806 return NULL_TREE;
4808 /* Compute the array index we look for. */
4809 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
4810 elt_size);
4811 access_index += low_bound;
4812 if (index_type)
4813 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
4814 TYPE_SIGN (index_type));
4816 /* And offset within the access. */
4817 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
4819 /* See if the array field is large enough to span the whole access. We do
4820 not care to fold accesses spanning multiple array indexes. */
4821 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
4822 return NULL_TREE;
4824 index = low_bound - 1;
4825 if (index_type)
4826 index = wi::ext (index, TYPE_PRECISION (index_type),
4827 TYPE_SIGN (index_type));
4829 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
4831 /* An array constructor may explicitly set the index, specify a range,
4832 or leave the index NULL, meaning that it is the next index after the
4833 previous one. */
4834 if (cfield)
4836 if (TREE_CODE (cfield) == INTEGER_CST)
4837 max_index = index = wi::to_offset (cfield);
4838 else
4840 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
4841 index = wi::to_offset (TREE_OPERAND (cfield, 0));
4842 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
4845 else
4847 index += 1;
4848 if (index_type)
4849 index = wi::ext (index, TYPE_PRECISION (index_type),
4850 TYPE_SIGN (index_type));
4851 max_index = index;
4854 /* Do we have a match? */
4855 if (wi::cmpu (access_index, index) >= 0
4856 && wi::cmpu (access_index, max_index) <= 0)
4857 return fold_ctor_reference (type, cval, inner_offset, size,
4858 from_decl);
4860 /* When memory is not explicitly mentioned in the constructor,
4861 it is 0 (or out of range). */
4862 return build_zero_cst (type);
4865 /* CTOR is CONSTRUCTOR of an aggregate or vector.
4866 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
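/* Illustration (a sketch assuming 32-bit int): given
     static const struct { int a; int b; } s = {1, 2};
   a read of type int with OFFSET 32 and SIZE 32 overlaps field b exactly
   and folds to the constant 2, while a read spanning both fields returns
   NULL_TREE.  */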
4868 static tree
4869 fold_nonarray_ctor_reference (tree type, tree ctor,
4870 unsigned HOST_WIDE_INT offset,
4871 unsigned HOST_WIDE_INT size,
4872 tree from_decl)
4874 unsigned HOST_WIDE_INT cnt;
4875 tree cfield, cval;
4877 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
4878 cval)
4880 tree byte_offset = DECL_FIELD_OFFSET (cfield);
4881 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
4882 tree field_size = DECL_SIZE (cfield);
4883 offset_int bitoffset;
4884 offset_int bitoffset_end, access_end;
4886 /* Variable-sized objects in static constructors make no sense,
4887 but field_size can be NULL for flexible array members. */
4888 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
4889 && TREE_CODE (byte_offset) == INTEGER_CST
4890 && (field_size != NULL_TREE
4891 ? TREE_CODE (field_size) == INTEGER_CST
4892 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
4894 /* Compute bit offset of the field. */
4895 bitoffset = (wi::to_offset (field_offset)
4896 + wi::lshift (wi::to_offset (byte_offset),
4897 LOG2_BITS_PER_UNIT));
4898 /* Compute bit offset where the field ends. */
4899 if (field_size != NULL_TREE)
4900 bitoffset_end = bitoffset + wi::to_offset (field_size);
4901 else
4902 bitoffset_end = 0;
4904 access_end = offset_int (offset) + size;
4906 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
4907 [BITOFFSET, BITOFFSET_END)? */
4908 if (wi::cmps (access_end, bitoffset) > 0
4909 && (field_size == NULL_TREE
4910 || wi::lts_p (offset, bitoffset_end)))
4912 offset_int inner_offset = offset_int (offset) - bitoffset;
4913 /* We do have overlap. Now see if the field is large enough to
4914 cover the access. Give up on accesses spanning multiple
4915 fields. */
4916 if (wi::cmps (access_end, bitoffset_end) > 0)
4917 return NULL_TREE;
4918 if (wi::lts_p (offset, bitoffset))
4919 return NULL_TREE;
4920 return fold_ctor_reference (type, cval,
4921 inner_offset.to_uhwi (), size,
4922 from_decl);
4925 /* When memory is not explicitly mentioned in the constructor, it is 0. */
4926 return build_zero_cst (type);
4929 /* CTOR is the value initializing the memory. Fold a reference of type TYPE
4930 and size SIZE to the memory at bit OFFSET. */
4932 static tree
4933 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
4934 unsigned HOST_WIDE_INT size, tree from_decl)
4936 tree ret;
4938 /* We found the field with an exact match. */
4939 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
4940 && !offset)
4941 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
4943 /* We are at the end of the walk; see if we can view-convert the
4944 result. */
4945 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
4946 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
4947 && !compare_tree_int (TYPE_SIZE (type), size)
4948 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
4950 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
4951 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
4952 if (ret)
4953 STRIP_NOPS (ret);
4954 return ret;
4956 /* For constants and byte-aligned/sized reads try to go through
4957 native_encode/interpret. */
4958 if (CONSTANT_CLASS_P (ctor)
4959 && BITS_PER_UNIT == 8
4960 && offset % BITS_PER_UNIT == 0
4961 && size % BITS_PER_UNIT == 0
4962 && size <= MAX_BITSIZE_MODE_ANY_MODE)
4964 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
4965 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
4966 offset / BITS_PER_UNIT) > 0)
4967 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
4969 if (TREE_CODE (ctor) == CONSTRUCTOR)
4972 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
4973 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
4974 return fold_array_ctor_reference (type, ctor, offset, size,
4975 from_decl);
4976 else
4977 return fold_nonarray_ctor_reference (type, ctor, offset, size,
4978 from_decl);
4981 return NULL_TREE;
4984 /* Return the tree representing the element referenced by T if T is an
4985 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
4986 names using VALUEIZE. Return NULL_TREE otherwise. */
4988 tree
4989 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
4991 tree ctor, idx, base;
4992 HOST_WIDE_INT offset, size, max_size;
4993 tree tem;
4995 if (TREE_THIS_VOLATILE (t))
4996 return NULL_TREE;
4998 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4999 return get_symbol_constant_value (t);
5001 tem = fold_read_from_constant_string (t);
5002 if (tem)
5003 return tem;
5005 switch (TREE_CODE (t))
5007 case ARRAY_REF:
5008 case ARRAY_RANGE_REF:
5009 /* Constant indexes are handled well by get_base_constructor.
5010 We only need to special-case variable offsets here.
5011 FIXME: This code can't handle nested references with variable indexes
5012 (they will be handled only by iteration of ccp). Perhaps we can bring
5013 get_ref_base_and_extent here and make it use a valueize callback. */
5014 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5015 && valueize
5016 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5017 && TREE_CODE (idx) == INTEGER_CST)
5019 tree low_bound, unit_size;
5021 /* If the resulting bit-offset is constant, track it. */
5022 if ((low_bound = array_ref_low_bound (t),
5023 TREE_CODE (low_bound) == INTEGER_CST)
5024 && (unit_size = array_ref_element_size (t),
5025 tree_fits_uhwi_p (unit_size)))
5027 offset_int woffset
5028 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5029 TYPE_PRECISION (TREE_TYPE (idx)));
5031 if (wi::fits_shwi_p (woffset))
5033 offset = woffset.to_shwi ();
5034 /* TODO: This code seems wrong; it should multiply first and then
5035 check whether the result fits. */
5036 offset *= tree_to_uhwi (unit_size);
5037 offset *= BITS_PER_UNIT;
5039 base = TREE_OPERAND (t, 0);
5040 ctor = get_base_constructor (base, &offset, valueize);
5041 /* Empty constructor. Always fold to 0. */
5042 if (ctor == error_mark_node)
5043 return build_zero_cst (TREE_TYPE (t));
5044 /* Out-of-bounds array access. The value is undefined,
5045 but don't fold. */
5046 if (offset < 0)
5047 return NULL_TREE;
5048 /* We cannot determine the ctor. */
5049 if (!ctor)
5050 return NULL_TREE;
5051 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5052 tree_to_uhwi (unit_size)
5053 * BITS_PER_UNIT,
5054 base);
5058 /* Fallthru. */
5060 case COMPONENT_REF:
5061 case BIT_FIELD_REF:
5062 case TARGET_MEM_REF:
5063 case MEM_REF:
5064 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
5065 ctor = get_base_constructor (base, &offset, valueize);
5067 /* Empty constructor. Always fold to 0. */
5068 if (ctor == error_mark_node)
5069 return build_zero_cst (TREE_TYPE (t));
5070 /* We do not know the precise address. */
5071 if (max_size == -1 || max_size != size)
5072 return NULL_TREE;
5073 /* We cannot determine the ctor. */
5074 if (!ctor)
5075 return NULL_TREE;
5077 /* Out-of-bounds array access. The value is undefined, but don't fold. */
5078 if (offset < 0)
5079 return NULL_TREE;
5081 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5082 base);
5084 case REALPART_EXPR:
5085 case IMAGPART_EXPR:
5087 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5088 if (c && TREE_CODE (c) == COMPLEX_CST)
5089 return fold_build1_loc (EXPR_LOCATION (t),
5090 TREE_CODE (t), TREE_TYPE (t), c);
5091 break;
5094 default:
5095 break;
5098 return NULL_TREE;
5101 tree
5102 fold_const_aggregate_ref (tree t)
5104 return fold_const_aggregate_ref_1 (t, NULL);
5107 /* Look up the virtual method with index TOKEN in the virtual table V
5108 at OFFSET.
5109 If CAN_REFER is non-NULL, set it to false if the method
5110 is not referable or if the virtual table is ill-formed (such as rewritten
5111 by a non-C++-produced symbol); otherwise just return NULL in that case. */
5113 tree
5114 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5115 tree v,
5116 unsigned HOST_WIDE_INT offset,
5117 bool *can_refer)
5119 tree vtable = v, init, fn;
5120 unsigned HOST_WIDE_INT size;
5121 unsigned HOST_WIDE_INT elt_size, access_index;
5122 tree domain_type;
5124 if (can_refer)
5125 *can_refer = true;
5127 /* First of all, double-check that we have a virtual table. */
5128 if (TREE_CODE (v) != VAR_DECL
5129 || !DECL_VIRTUAL_P (v))
5131 gcc_assert (in_lto_p);
5132 /* Pass down that we lost track of the target. */
5133 if (can_refer)
5134 *can_refer = false;
5135 return NULL_TREE;
5138 init = ctor_for_folding (v);
5140 /* The virtual tables should always be born with constructors,
5141 and we should always assume that they are available for
5142 folding. At the moment we do not stream them in all cases,
5143 but it should never happen that the ctor seems unreachable. */
5144 gcc_assert (init);
5145 if (init == error_mark_node)
5147 gcc_assert (in_lto_p);
5148 /* Pass down that we lost track of the target. */
5149 if (can_refer)
5150 *can_refer = false;
5151 return NULL_TREE;
5153 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5154 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5155 offset *= BITS_PER_UNIT;
5156 offset += token * size;
5158 /* Look up the value in the constructor, which is assumed to be an array.
5159 This is equivalent to
5160 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5161 offset, size, NULL);
5162 but in constant time. We expect that the frontend produced a simple
5163 array without indexed initializers. */
5165 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5166 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5167 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5168 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5170 access_index = offset / BITS_PER_UNIT / elt_size;
5171 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5173 /* This code assumes that the C++ FE produces no
5174 indexed fields, so we can index the array directly. */
5175 if (access_index < CONSTRUCTOR_NELTS (init))
5177 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5178 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5179 STRIP_NOPS (fn);
5181 else
5182 fn = NULL;
5184 /* For a type-inconsistent program we may end up looking up a virtual method
5185 in a virtual table that does not contain TOKEN entries. We may overrun
5186 the virtual table and pick up a constant or an RTTI info pointer.
5187 In any case the call is undefined. */
5188 if (!fn
5189 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5190 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5191 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5192 else
5194 fn = TREE_OPERAND (fn, 0);
5196 /* When the cgraph node is missing and the function is not public, we cannot
5197 devirtualize. This can happen in WHOPR when the actual method
5198 ends up in another partition, because we found the devirtualization
5199 possibility too late. */
5200 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5202 if (can_refer)
5204 *can_refer = false;
5205 return fn;
5207 return NULL_TREE;
5211 /* Make sure we create a cgraph node for functions we'll reference.
5212 They can be non-existent if the reference comes from an entry
5213 of an external vtable for example. */
5214 cgraph_node::get_create (fn);
5216 return fn;
5219 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5220 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5221 KNOWN_BINFO carries the binfo describing the true type of
5222 OBJ_TYPE_REF_OBJECT(REF).
5223 If CAN_REFER is non-NULL, set it to false if the method
5224 is not referable or if the virtual table is ill-formed (such as rewritten
5225 by a non-C++-produced symbol); otherwise just return NULL in that case. */
5227 tree
5228 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5229 bool *can_refer)
5231 unsigned HOST_WIDE_INT offset;
5232 tree v;
5234 v = BINFO_VTABLE (known_binfo);
5235 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
5236 if (!v)
5237 return NULL_TREE;
5239 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5241 if (can_refer)
5242 *can_refer = false;
5243 return NULL_TREE;
5245 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5248 /* Return true iff VAL is a gimple expression that is known to be
5249 non-negative. Restricted to floating-point inputs. */
5251 bool
5252 gimple_val_nonnegative_real_p (tree val)
5254 gimple def_stmt;
5256 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
5258 /* Use existing logic for non-gimple trees. */
5259 if (tree_expr_nonnegative_p (val))
5260 return true;
5262 if (TREE_CODE (val) != SSA_NAME)
5263 return false;
5265 /* Currently we look only at the immediately defining statement
5266 to make this determination, since recursion on defining
5267 statements of operands can lead to quadratic behavior in the
5268 worst case. This is expected to catch almost all occurrences
5269 in practice. It would be possible to implement limited-depth
5270 recursion if important cases are lost. Alternatively, passes
5271 that need this information (such as the pow/powi lowering code
5272 in the cse_sincos pass) could be revised to provide it through
5273 dataflow propagation. */
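/* For instance, "y = x * x" and "y = __builtin_fabs (x)" are recognized
   from the single defining statement alone, whereas a chain such as
   "t = x * x; y = t + 1.0" would require the recursion we avoid.  */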
5275 def_stmt = SSA_NAME_DEF_STMT (val);
5277 if (is_gimple_assign (def_stmt))
5279 tree op0, op1;
5281 /* See fold-const.c:tree_expr_nonnegative_p for additional
5282 cases that could be handled with recursion. */
5284 switch (gimple_assign_rhs_code (def_stmt))
5286 case ABS_EXPR:
5287 /* Always true for floating-point operands. */
5288 return true;
5290 case MULT_EXPR:
5291 /* True if the two operands are identical (since we are
5292 restricted to floating-point inputs). */
5293 op0 = gimple_assign_rhs1 (def_stmt);
5294 op1 = gimple_assign_rhs2 (def_stmt);
5296 if (op0 == op1
5297 || operand_equal_p (op0, op1, 0))
5298 return true;
5300 default:
5301 return false;
5304 else if (is_gimple_call (def_stmt))
5306 tree fndecl = gimple_call_fndecl (def_stmt);
5307 if (fndecl
5308 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5310 tree arg1;
5312 switch (DECL_FUNCTION_CODE (fndecl))
5314 CASE_FLT_FN (BUILT_IN_ACOS):
5315 CASE_FLT_FN (BUILT_IN_ACOSH):
5316 CASE_FLT_FN (BUILT_IN_CABS):
5317 CASE_FLT_FN (BUILT_IN_COSH):
5318 CASE_FLT_FN (BUILT_IN_ERFC):
5319 CASE_FLT_FN (BUILT_IN_EXP):
5320 CASE_FLT_FN (BUILT_IN_EXP10):
5321 CASE_FLT_FN (BUILT_IN_EXP2):
5322 CASE_FLT_FN (BUILT_IN_FABS):
5323 CASE_FLT_FN (BUILT_IN_FDIM):
5324 CASE_FLT_FN (BUILT_IN_HYPOT):
5325 CASE_FLT_FN (BUILT_IN_POW10):
5326 return true;
5328 CASE_FLT_FN (BUILT_IN_SQRT):
5329 /* sqrt(-0.0) is -0.0, and sqrt is not defined for
5330 negative inputs. */
5331 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
5332 return true;
5334 break;
5336 CASE_FLT_FN (BUILT_IN_POWI):
5337 /* True if the second argument is an even integer. */
5338 arg1 = gimple_call_arg (def_stmt, 1);
5340 if (TREE_CODE (arg1) == INTEGER_CST
5341 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
5342 return true;
5344 break;
5346 CASE_FLT_FN (BUILT_IN_POW):
5347 /* True if the second argument is an even integer-valued
5348 real. */
5349 arg1 = gimple_call_arg (def_stmt, 1);
5351 if (TREE_CODE (arg1) == REAL_CST)
5353 REAL_VALUE_TYPE c;
5354 HOST_WIDE_INT n;
5356 c = TREE_REAL_CST (arg1);
5357 n = real_to_integer (&c);
5359 if ((n & 1) == 0)
5361 REAL_VALUE_TYPE cint;
5362 real_from_integer (&cint, VOIDmode, n, SIGNED);
5363 if (real_identical (&c, &cint))
5364 return true;
5368 break;
5370 default:
5371 return false;
5376 return false;
5379 /* Given a pointer value T, return a simplified version of an
5380 indirection through T, or NULL_TREE if no simplification is
5381 possible. Note that the resulting type may differ from
5382 the pointed-to type, but only in the sense that it is still compatible
5383 from the langhooks point of view. */
5385 tree
5386 gimple_fold_indirect_ref (tree t)
5388 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5389 tree sub = t;
5390 tree subtype;
5392 STRIP_NOPS (sub);
5393 subtype = TREE_TYPE (sub);
5394 if (!POINTER_TYPE_P (subtype))
5395 return NULL_TREE;
5397 if (TREE_CODE (sub) == ADDR_EXPR)
5399 tree op = TREE_OPERAND (sub, 0);
5400 tree optype = TREE_TYPE (op);
5401 /* *&p => p */
5402 if (useless_type_conversion_p (type, optype))
5403 return op;
5405 /* *(foo *)&fooarray => fooarray[0] */
5406 if (TREE_CODE (optype) == ARRAY_TYPE
5407 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5408 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5410 tree type_domain = TYPE_DOMAIN (optype);
5411 tree min_val = size_zero_node;
5412 if (type_domain && TYPE_MIN_VALUE (type_domain))
5413 min_val = TYPE_MIN_VALUE (type_domain);
5414 if (TREE_CODE (min_val) == INTEGER_CST)
5415 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5417 /* *(foo *)&complexfoo => __real__ complexfoo */
5418 else if (TREE_CODE (optype) == COMPLEX_TYPE
5419 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5420 return fold_build1 (REALPART_EXPR, type, op);
5421 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5422 else if (TREE_CODE (optype) == VECTOR_TYPE
5423 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5425 tree part_width = TYPE_SIZE (type);
5426 tree index = bitsize_int (0);
5427 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5431 /* *(p + CST) -> ... */
5432 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5433 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5435 tree addr = TREE_OPERAND (sub, 0);
5436 tree off = TREE_OPERAND (sub, 1);
5437 tree addrtype;
5439 STRIP_NOPS (addr);
5440 addrtype = TREE_TYPE (addr);
5442 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5443 if (TREE_CODE (addr) == ADDR_EXPR
5444 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5445 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5446 && tree_fits_uhwi_p (off))
5448 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5449 tree part_width = TYPE_SIZE (type);
5450 unsigned HOST_WIDE_INT part_widthi
5451 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5452 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5453 tree index = bitsize_int (indexi);
5454 if (offset / part_widthi
5455 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5456 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5457 part_width, index);
5460 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5461 if (TREE_CODE (addr) == ADDR_EXPR
5462 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5463 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5465 tree size = TYPE_SIZE_UNIT (type);
5466 if (tree_int_cst_equal (size, off))
5467 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5470 /* *(p + CST) -> MEM_REF <p, CST>. */
5471 if (TREE_CODE (addr) != ADDR_EXPR
5472 || DECL_P (TREE_OPERAND (addr, 0)))
5473 return fold_build2 (MEM_REF, type,
5474 addr,
5475 wide_int_to_tree (ptype, off));
5478 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5479 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5480 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5481 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5483 tree type_domain;
5484 tree min_val = size_zero_node;
5485 tree osub = sub;
5486 sub = gimple_fold_indirect_ref (sub);
5487 if (! sub)
5488 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5489 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5490 if (type_domain && TYPE_MIN_VALUE (type_domain))
5491 min_val = TYPE_MIN_VALUE (type_domain);
5492 if (TREE_CODE (min_val) == INTEGER_CST)
5493 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5496 return NULL_TREE;
5499 /* Return true if CODE is an operation that, when operating on signed
5500 integer types, involves undefined behavior on overflow and
5501 can be expressed with unsigned arithmetic. */
5503 bool
5504 arith_code_with_undefined_signed_overflow (tree_code code)
5506 switch (code)
5508 case PLUS_EXPR:
5509 case MINUS_EXPR:
5510 case MULT_EXPR:
5511 case NEGATE_EXPR:
5512 case POINTER_PLUS_EXPR:
5513 return true;
5514 default:
5515 return false;
5519 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5520 operation that can be transformed to unsigned arithmetic by converting
5521 its operands, carrying out the operation in the corresponding unsigned
5522 type, and converting the result back to the original type.
5524 Returns a sequence of statements that replace STMT and also contain
5525 a modified form of STMT itself. */
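/* As an illustration, for a signed int addition "x_1 = a_2 + b_3" the
   returned sequence is roughly

     _4 = (unsigned int) a_2;
     _5 = (unsigned int) b_3;
     _6 = _4 + _5;
     x_1 = (int) _6;

   where the unsigned addition is the modified STMT itself.  */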
5527 gimple_seq
5528 rewrite_to_defined_overflow (gimple stmt)
5530 if (dump_file && (dump_flags & TDF_DETAILS))
5532 fprintf (dump_file, "rewriting stmt with undefined signed "
5533 "overflow ");
5534 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5537 tree lhs = gimple_assign_lhs (stmt);
5538 tree type = unsigned_type_for (TREE_TYPE (lhs));
5539 gimple_seq stmts = NULL;
5540 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
5542 gimple_seq stmts2 = NULL;
5543 gimple_set_op (stmt, i,
5544 force_gimple_operand (fold_convert (type,
5545 gimple_op (stmt, i)),
5546 &stmts2, true, NULL_TREE));
5547 gimple_seq_add_seq (&stmts, stmts2);
5549 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
5550 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
5551 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
5552 gimple_seq_add_stmt (&stmts, stmt);
5553 gimple cvt = gimple_build_assign_with_ops
5554 (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
5555 gimple_seq_add_stmt (&stmts, cvt);
5557 return stmts;
5561 /* Build the expression CODE OP0 of type TYPE with location LOC,
5562 simplifying it first if possible using VALUEIZE if not NULL.
5563 OP0 is expected to be valueized already. Returns the built
5564 expression value and appends statements possibly defining it
5565 to SEQ. */
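/* A usage sketch (hypothetical LOC, TYPE, OP and ONE; insertion of the
   sequence is up to the caller):

     gimple_seq seq = NULL;
     tree tem = gimple_build (&seq, loc, NEGATE_EXPR, type, op, NULL);
     tree res = gimple_build (&seq, loc, PLUS_EXPR, type, tem, one, NULL);
     gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

   Statements are emitted only for whatever gimple_simplify cannot fold
   away.  */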
5567 tree
5568 gimple_build (gimple_seq *seq, location_t loc,
5569 enum tree_code code, tree type, tree op0,
5570 tree (*valueize)(tree))
5572 tree res = gimple_simplify (code, type, op0, seq, valueize);
5573 if (!res)
5575 if (gimple_in_ssa_p (cfun))
5576 res = make_ssa_name (type, NULL);
5577 else
5578 res = create_tmp_reg (type, NULL);
5579 gimple stmt;
5580 if (code == REALPART_EXPR
5581 || code == IMAGPART_EXPR
5582 || code == VIEW_CONVERT_EXPR)
5583 stmt = gimple_build_assign_with_ops (code, res,
5584 build1 (code, type,
5585 op0), NULL_TREE);
5586 else
5587 stmt = gimple_build_assign_with_ops (code, res, op0, NULL_TREE);
5588 gimple_set_location (stmt, loc);
5589 gimple_seq_add_stmt_without_update (seq, stmt);
5591 return res;
5594 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
5595 simplifying it first if possible using VALUEIZE if not NULL.
5596 OP0 and OP1 are expected to be valueized already. Returns the built
5597 expression value and appends statements possibly defining it
5598 to SEQ. */
5600 tree
5601 gimple_build (gimple_seq *seq, location_t loc,
5602 enum tree_code code, tree type, tree op0, tree op1,
5603 tree (*valueize)(tree))
5605 tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
5606 if (!res)
5608 if (gimple_in_ssa_p (cfun))
5609 res = make_ssa_name (type, NULL);
5610 else
5611 res = create_tmp_reg (type, NULL);
5612 gimple stmt = gimple_build_assign_with_ops (code, res, op0, op1);
5613 gimple_set_location (stmt, loc);
5614 gimple_seq_add_stmt_without_update (seq, stmt);
5616 return res;
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0, OP1 and OP2 are expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type, NULL);
      else
	res = create_tmp_reg (type, NULL);
      gimple stmt;
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign_with_ops (code, res,
					     build3 (BIT_FIELD_REF, type,
						     op0, op1, op2),
					     NULL_TREE);
      else
	stmt = gimple_build_assign_with_ops (code, res, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
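/* Usage sketch (illustrative only; names are hypothetical): extracting
   the low 32 bits of a value VEC as a 32-bit integer:

     tree t = gimple_build (&seq, loc, BIT_FIELD_REF, intSI_type_node,
			    vec, bitsize_int (32), bitsize_int (0), NULL);

   BIT_FIELD_REF needs the build3 path above because its operands form
   a single reference tree rather than a regular ternary operation.  */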
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0 is expected to be valueized already.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type, NULL);
	  else
	    res = create_tmp_reg (type, NULL);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
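/* Usage sketch (illustrative only; names are hypothetical):

     tree r = gimple_build (&seq, loc, BUILT_IN_SQRT,
			    double_type_node, x, NULL);

   If the argument simplifies (e.g. it is a suitable constant) no call
   is emitted; otherwise a call to __builtin_sqrt is appended to SEQ
   and its result returned.  */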
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0 and ARG1 are expected to be valueized already.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0, tree arg1,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type, NULL);
	  else
	    res = create_tmp_reg (type, NULL);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
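/* Usage sketch (illustrative only; names are hypothetical):

     tree r = gimple_build (&seq, loc, BUILT_IN_POW,
			    double_type_node, x, y, NULL);

   builds pow (x, y), folded away whenever gimple_simplify knows a
   simpler form for the given arguments.  */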
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0, ARG1 and ARG2 are expected to be valueized already.  Returns
   the built expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type,
	      tree arg0, tree arg1, tree arg2,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type, NULL);
	  else
	    res = create_tmp_reg (type, NULL);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
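/* Usage sketch (illustrative only; names are hypothetical):

     tree r = gimple_build (&seq, loc, BUILT_IN_FMA,
			    double_type_node, x, y, z, NULL);

   builds fma (x, y, z).  Passing void_type_node as TYPE instead leaves
   the emitted call without a LHS and returns NULL_TREE, matching the
   "no result if TYPE is void" case described above.  */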
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such a conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
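/* Usage sketch (illustrative only; names are hypothetical):

     tree off = gimple_convert (&seq, loc, sizetype, idx);

   returns IDX unchanged when its type is already compatible with
   sizetype for GIMPLE purposes; otherwise it builds (and possibly
   folds) the conversion, appending any needed statement to SEQ.  */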