2014-10-28 Richard Biener <rguenther@suse.de>
[official-gcc.git] / gcc / gimple-fold.c
blobd0554f4e54f7ec5a07f62612f16e2738bcec753f
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "stringpool.h"
27 #include "expr.h"
28 #include "stmt.h"
29 #include "stor-layout.h"
30 #include "flags.h"
31 #include "hashtab.h"
32 #include "hash-set.h"
33 #include "vec.h"
34 #include "machmode.h"
35 #include "hard-reg-set.h"
36 #include "input.h"
37 #include "function.h"
38 #include "dumpfile.h"
39 #include "bitmap.h"
40 #include "predict.h"
41 #include "dominance.h"
42 #include "basic-block.h"
43 #include "tree-ssa-alias.h"
44 #include "internal-fn.h"
45 #include "gimple-fold.h"
46 #include "gimple-expr.h"
47 #include "is-a.h"
48 #include "gimple.h"
49 #include "gimplify.h"
50 #include "gimple-iterator.h"
51 #include "gimple-ssa.h"
52 #include "tree-ssanames.h"
53 #include "tree-into-ssa.h"
54 #include "tree-dfa.h"
55 #include "tree-ssa.h"
56 #include "tree-ssa-propagate.h"
57 #include "target.h"
58 #include "ipa-utils.h"
59 #include "gimple-pretty-print.h"
60 #include "tree-ssa-address.h"
61 #include "langhooks.h"
62 #include "gimplify-me.h"
63 #include "dbgcnt.h"
64 #include "builtins.h"
65 #include "output.h"
66 #include "tree-eh.h"
67 #include "gimple-match.h"
68 #include "tree-phinodes.h"
69 #include "ssa-iterators.h"
71 /* Return true when DECL can be referenced from current unit.
72 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
73 We can get declarations that are not possible to reference for various
74 reasons:
76 1) When analyzing C++ virtual tables.
77 C++ virtual tables do have known constructors even
78 when they are keyed to other compilation unit.
79 Those tables can contain pointers to methods and vars
80 in other units. Those methods have both STATIC and EXTERNAL
81 set.
82 2) In WHOPR mode devirtualization might lead to reference
83 to method that was partitioned elsehwere.
84 In this case we have static VAR_DECL or FUNCTION_DECL
85 that has no corresponding callgraph/varpool node
86 declaring the body.
87 3) COMDAT functions referred by external vtables that
88 we devirtualize only during final compilation stage.
89 At this time we already decided that we will not output
90 the function body and thus we can't reference the symbol
91 directly. */
93 static bool
94 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
96 varpool_node *vnode;
97 struct cgraph_node *node;
98 symtab_node *snode;
100 if (DECL_ABSTRACT_P (decl))
101 return false;
103 /* We are concerned only about static/external vars and functions. */
104 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
105 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
106 return true;
108 /* Static objects can be referred only if they was not optimized out yet. */
109 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
111 /* Before we start optimizing unreachable code we can be sure all
112 static objects are defined. */
113 if (symtab->function_flags_ready)
114 return true;
115 snode = symtab_node::get (decl);
116 if (!snode || !snode->definition)
117 return false;
118 node = dyn_cast <cgraph_node *> (snode);
119 return !node || !node->global.inlined_to;
122 /* We will later output the initializer, so we can refer to it.
123 So we are concerned only when DECL comes from initializer of
124 external var or var that has been optimized out. */
125 if (!from_decl
126 || TREE_CODE (from_decl) != VAR_DECL
127 || (!DECL_EXTERNAL (from_decl)
128 && (vnode = varpool_node::get (from_decl)) != NULL
129 && vnode->definition)
130 || (flag_ltrans
131 && (vnode = varpool_node::get (from_decl)) != NULL
132 && vnode->in_other_partition))
133 return true;
134 /* We are folding reference from external vtable. The vtable may reffer
135 to a symbol keyed to other compilation unit. The other compilation
136 unit may be in separate DSO and the symbol may be hidden. */
137 if (DECL_VISIBILITY_SPECIFIED (decl)
138 && DECL_EXTERNAL (decl)
139 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
140 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
141 return false;
142 /* When function is public, we always can introduce new reference.
143 Exception are the COMDAT functions where introducing a direct
144 reference imply need to include function body in the curren tunit. */
145 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
146 return true;
147 /* We have COMDAT. We are going to check if we still have definition
148 or if the definition is going to be output in other partition.
149 Bypass this when gimplifying; all needed functions will be produced.
151 As observed in PR20991 for already optimized out comdat virtual functions
152 it may be tempting to not necessarily give up because the copy will be
153 output elsewhere when corresponding vtable is output.
154 This is however not possible - ABI specify that COMDATs are output in
155 units where they are used and when the other unit was compiled with LTO
156 it is possible that vtable was kept public while the function itself
157 was privatized. */
158 if (!symtab->function_flags_ready)
159 return true;
161 snode = symtab_node::get (decl);
162 if (!snode
163 || ((!snode->definition || DECL_EXTERNAL (decl))
164 && (!snode->in_other_partition
165 || (!snode->forced_by_abi && !snode->force_output))))
166 return false;
167 node = dyn_cast <cgraph_node *> (snode);
168 return !node || !node->global.inlined_to;
171 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
172 acceptable form for is_gimple_min_invariant.
173 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
175 tree
176 canonicalize_constructor_val (tree cval, tree from_decl)
178 tree orig_cval = cval;
179 STRIP_NOPS (cval);
180 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
181 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
183 tree ptr = TREE_OPERAND (cval, 0);
184 if (is_gimple_min_invariant (ptr))
185 cval = build1_loc (EXPR_LOCATION (cval),
186 ADDR_EXPR, TREE_TYPE (ptr),
187 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
188 ptr,
189 fold_convert (ptr_type_node,
190 TREE_OPERAND (cval, 1))));
192 if (TREE_CODE (cval) == ADDR_EXPR)
194 tree base = NULL_TREE;
195 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
197 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
198 if (base)
199 TREE_OPERAND (cval, 0) = base;
201 else
202 base = get_base_address (TREE_OPERAND (cval, 0));
203 if (!base)
204 return NULL_TREE;
206 if ((TREE_CODE (base) == VAR_DECL
207 || TREE_CODE (base) == FUNCTION_DECL)
208 && !can_refer_decl_in_current_unit_p (base, from_decl))
209 return NULL_TREE;
210 if (TREE_CODE (base) == VAR_DECL)
211 TREE_ADDRESSABLE (base) = 1;
212 else if (TREE_CODE (base) == FUNCTION_DECL)
214 /* Make sure we create a cgraph node for functions we'll reference.
215 They can be non-existent if the reference comes from an entry
216 of an external vtable for example. */
217 cgraph_node::get_create (base);
219 /* Fixup types in global initializers. */
220 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
221 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
223 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
224 cval = fold_convert (TREE_TYPE (orig_cval), cval);
225 return cval;
227 if (TREE_OVERFLOW_P (cval))
228 return drop_tree_overflow (cval);
229 return orig_cval;
232 /* If SYM is a constant variable with known value, return the value.
233 NULL_TREE is returned otherwise. */
235 tree
236 get_symbol_constant_value (tree sym)
238 tree val = ctor_for_folding (sym);
239 if (val != error_mark_node)
241 if (val)
243 val = canonicalize_constructor_val (unshare_expr (val), sym);
244 if (val && is_gimple_min_invariant (val))
245 return val;
246 else
247 return NULL_TREE;
249 /* Variables declared 'const' without an initializer
250 have zero as the initializer if they may not be
251 overridden at link or run time. */
252 if (!val
253 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
254 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
255 return build_zero_cst (TREE_TYPE (sym));
258 return NULL_TREE;
263 /* Subroutine of fold_stmt. We perform several simplifications of the
264 memory reference tree EXPR and make sure to re-gimplify them properly
265 after propagation of constant addresses. IS_LHS is true if the
266 reference is supposed to be an lvalue. */
268 static tree
269 maybe_fold_reference (tree expr, bool is_lhs)
271 tree result;
273 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
274 || TREE_CODE (expr) == REALPART_EXPR
275 || TREE_CODE (expr) == IMAGPART_EXPR)
276 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
277 return fold_unary_loc (EXPR_LOCATION (expr),
278 TREE_CODE (expr),
279 TREE_TYPE (expr),
280 TREE_OPERAND (expr, 0));
281 else if (TREE_CODE (expr) == BIT_FIELD_REF
282 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
283 return fold_ternary_loc (EXPR_LOCATION (expr),
284 TREE_CODE (expr),
285 TREE_TYPE (expr),
286 TREE_OPERAND (expr, 0),
287 TREE_OPERAND (expr, 1),
288 TREE_OPERAND (expr, 2));
290 if (!is_lhs
291 && (result = fold_const_aggregate_ref (expr))
292 && is_gimple_min_invariant (result))
293 return result;
295 return NULL_TREE;
299 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
300 replacement rhs for the statement or NULL_TREE if no simplification
301 could be made. It is assumed that the operands have been previously
302 folded. */
304 static tree
305 fold_gimple_assign (gimple_stmt_iterator *si)
307 gimple stmt = gsi_stmt (*si);
308 enum tree_code subcode = gimple_assign_rhs_code (stmt);
309 location_t loc = gimple_location (stmt);
311 tree result = NULL_TREE;
313 switch (get_gimple_rhs_class (subcode))
315 case GIMPLE_SINGLE_RHS:
317 tree rhs = gimple_assign_rhs1 (stmt);
319 if (REFERENCE_CLASS_P (rhs))
320 return maybe_fold_reference (rhs, false);
322 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
324 tree val = OBJ_TYPE_REF_EXPR (rhs);
325 if (is_gimple_min_invariant (val))
326 return val;
327 else if (flag_devirtualize && virtual_method_call_p (rhs))
329 bool final;
330 vec <cgraph_node *>targets
331 = possible_polymorphic_call_targets (rhs, stmt, &final);
332 if (final && targets.length () <= 1 && dbg_cnt (devirt))
334 if (dump_enabled_p ())
336 location_t loc = gimple_location_safe (stmt);
337 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
338 "resolving virtual function address "
339 "reference to function %s\n",
340 targets.length () == 1
341 ? targets[0]->name ()
342 : "NULL");
344 if (targets.length () == 1)
346 val = fold_convert (TREE_TYPE (val),
347 build_fold_addr_expr_loc
348 (loc, targets[0]->decl));
349 STRIP_USELESS_TYPE_CONVERSION (val);
351 else
352 /* We can not use __builtin_unreachable here because it
353 can not have address taken. */
354 val = build_int_cst (TREE_TYPE (val), 0);
355 return val;
360 else if (TREE_CODE (rhs) == ADDR_EXPR)
362 tree ref = TREE_OPERAND (rhs, 0);
363 tree tem = maybe_fold_reference (ref, true);
364 if (tem
365 && TREE_CODE (tem) == MEM_REF
366 && integer_zerop (TREE_OPERAND (tem, 1)))
367 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
368 else if (tem)
369 result = fold_convert (TREE_TYPE (rhs),
370 build_fold_addr_expr_loc (loc, tem));
371 else if (TREE_CODE (ref) == MEM_REF
372 && integer_zerop (TREE_OPERAND (ref, 1)))
373 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
376 else if (TREE_CODE (rhs) == CONSTRUCTOR
377 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
378 && (CONSTRUCTOR_NELTS (rhs)
379 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
381 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
382 unsigned i;
383 tree val;
385 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
386 if (TREE_CODE (val) != INTEGER_CST
387 && TREE_CODE (val) != REAL_CST
388 && TREE_CODE (val) != FIXED_CST)
389 return NULL_TREE;
391 return build_vector_from_ctor (TREE_TYPE (rhs),
392 CONSTRUCTOR_ELTS (rhs));
395 else if (DECL_P (rhs))
396 return get_symbol_constant_value (rhs);
398 /* If we couldn't fold the RHS, hand over to the generic
399 fold routines. */
400 if (result == NULL_TREE)
401 result = fold (rhs);
403 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
404 that may have been added by fold, and "useless" type
405 conversions that might now be apparent due to propagation. */
406 STRIP_USELESS_TYPE_CONVERSION (result);
408 if (result != rhs && valid_gimple_rhs_p (result))
409 return result;
411 return NULL_TREE;
413 break;
415 case GIMPLE_UNARY_RHS:
417 tree rhs = gimple_assign_rhs1 (stmt);
419 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
420 if (result)
422 /* If the operation was a conversion do _not_ mark a
423 resulting constant with TREE_OVERFLOW if the original
424 constant was not. These conversions have implementation
425 defined behavior and retaining the TREE_OVERFLOW flag
426 here would confuse later passes such as VRP. */
427 if (CONVERT_EXPR_CODE_P (subcode)
428 && TREE_CODE (result) == INTEGER_CST
429 && TREE_CODE (rhs) == INTEGER_CST)
430 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
432 STRIP_USELESS_TYPE_CONVERSION (result);
433 if (valid_gimple_rhs_p (result))
434 return result;
437 break;
439 case GIMPLE_BINARY_RHS:
440 /* Try to canonicalize for boolean-typed X the comparisons
441 X == 0, X == 1, X != 0, and X != 1. */
442 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
443 || gimple_assign_rhs_code (stmt) == NE_EXPR)
445 tree lhs = gimple_assign_lhs (stmt);
446 tree op1 = gimple_assign_rhs1 (stmt);
447 tree op2 = gimple_assign_rhs2 (stmt);
448 tree type = TREE_TYPE (op1);
450 /* Check whether the comparison operands are of the same boolean
451 type as the result type is.
452 Check that second operand is an integer-constant with value
453 one or zero. */
454 if (TREE_CODE (op2) == INTEGER_CST
455 && (integer_zerop (op2) || integer_onep (op2))
456 && useless_type_conversion_p (TREE_TYPE (lhs), type))
458 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
459 bool is_logical_not = false;
461 /* X == 0 and X != 1 is a logical-not.of X
462 X == 1 and X != 0 is X */
463 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
464 || (cmp_code == NE_EXPR && integer_onep (op2)))
465 is_logical_not = true;
467 if (is_logical_not == false)
468 result = op1;
469 /* Only for one-bit precision typed X the transformation
470 !X -> ~X is valied. */
471 else if (TYPE_PRECISION (type) == 1)
472 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
473 type, op1);
474 /* Otherwise we use !X -> X ^ 1. */
475 else
476 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
477 type, op1, build_int_cst (type, 1));
482 if (!result)
483 result = fold_binary_loc (loc, subcode,
484 TREE_TYPE (gimple_assign_lhs (stmt)),
485 gimple_assign_rhs1 (stmt),
486 gimple_assign_rhs2 (stmt));
488 if (result)
490 STRIP_USELESS_TYPE_CONVERSION (result);
491 if (valid_gimple_rhs_p (result))
492 return result;
494 break;
496 case GIMPLE_TERNARY_RHS:
497 /* Try to fold a conditional expression. */
498 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
500 tree op0 = gimple_assign_rhs1 (stmt);
501 tree tem;
502 bool set = false;
503 location_t cond_loc = gimple_location (stmt);
505 if (COMPARISON_CLASS_P (op0))
507 fold_defer_overflow_warnings ();
508 tem = fold_binary_loc (cond_loc,
509 TREE_CODE (op0), TREE_TYPE (op0),
510 TREE_OPERAND (op0, 0),
511 TREE_OPERAND (op0, 1));
512 /* This is actually a conditional expression, not a GIMPLE
513 conditional statement, however, the valid_gimple_rhs_p
514 test still applies. */
515 set = (tem && is_gimple_condexpr (tem)
516 && valid_gimple_rhs_p (tem));
517 fold_undefer_overflow_warnings (set, stmt, 0);
519 else if (is_gimple_min_invariant (op0))
521 tem = op0;
522 set = true;
524 else
525 return NULL_TREE;
527 if (set)
528 result = fold_build3_loc (cond_loc, COND_EXPR,
529 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
530 gimple_assign_rhs2 (stmt),
531 gimple_assign_rhs3 (stmt));
534 if (!result)
535 result = fold_ternary_loc (loc, subcode,
536 TREE_TYPE (gimple_assign_lhs (stmt)),
537 gimple_assign_rhs1 (stmt),
538 gimple_assign_rhs2 (stmt),
539 gimple_assign_rhs3 (stmt));
541 if (result)
543 STRIP_USELESS_TYPE_CONVERSION (result);
544 if (valid_gimple_rhs_p (result))
545 return result;
547 break;
549 case GIMPLE_INVALID_RHS:
550 gcc_unreachable ();
553 return NULL_TREE;
556 /* Attempt to fold a conditional statement. Return true if any changes were
557 made. We only attempt to fold the condition expression, and do not perform
558 any transformation that would require alteration of the cfg. It is
559 assumed that the operands have been previously folded. */
561 static bool
562 fold_gimple_cond (gimple stmt)
564 tree result = fold_binary_loc (gimple_location (stmt),
565 gimple_cond_code (stmt),
566 boolean_type_node,
567 gimple_cond_lhs (stmt),
568 gimple_cond_rhs (stmt));
570 if (result)
572 STRIP_USELESS_TYPE_CONVERSION (result);
573 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
575 gimple_cond_set_condition_from_tree (stmt, result);
576 return true;
580 return false;
584 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
585 adjusting the replacement stmts location and virtual operands.
586 If the statement has a lhs the last stmt in the sequence is expected
587 to assign to that lhs. */
589 static void
590 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
592 gimple stmt = gsi_stmt (*si_p);
594 if (gimple_has_location (stmt))
595 annotate_all_with_location (stmts, gimple_location (stmt));
597 /* First iterate over the replacement statements backward, assigning
598 virtual operands to their defining statements. */
599 gimple laststore = NULL;
600 for (gimple_stmt_iterator i = gsi_last (stmts);
601 !gsi_end_p (i); gsi_prev (&i))
603 gimple new_stmt = gsi_stmt (i);
604 if ((gimple_assign_single_p (new_stmt)
605 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
606 || (is_gimple_call (new_stmt)
607 && (gimple_call_flags (new_stmt)
608 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
610 tree vdef;
611 if (!laststore)
612 vdef = gimple_vdef (stmt);
613 else
614 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
615 gimple_set_vdef (new_stmt, vdef);
616 if (vdef && TREE_CODE (vdef) == SSA_NAME)
617 SSA_NAME_DEF_STMT (vdef) = new_stmt;
618 laststore = new_stmt;
622 /* Second iterate over the statements forward, assigning virtual
623 operands to their uses. */
624 tree reaching_vuse = gimple_vuse (stmt);
625 for (gimple_stmt_iterator i = gsi_start (stmts);
626 !gsi_end_p (i); gsi_next (&i))
628 gimple new_stmt = gsi_stmt (i);
629 /* If the new statement possibly has a VUSE, update it with exact SSA
630 name we know will reach this one. */
631 if (gimple_has_mem_ops (new_stmt))
632 gimple_set_vuse (new_stmt, reaching_vuse);
633 gimple_set_modified (new_stmt, true);
634 if (gimple_vdef (new_stmt))
635 reaching_vuse = gimple_vdef (new_stmt);
638 /* If the new sequence does not do a store release the virtual
639 definition of the original statement. */
640 if (reaching_vuse
641 && reaching_vuse == gimple_vuse (stmt))
643 tree vdef = gimple_vdef (stmt);
644 if (vdef
645 && TREE_CODE (vdef) == SSA_NAME)
647 unlink_stmt_vdef (stmt);
648 release_ssa_name (vdef);
652 /* Finally replace the original statement with the sequence. */
653 gsi_replace_with_seq (si_p, stmts, false);
656 /* Convert EXPR into a GIMPLE value suitable for substitution on the
657 RHS of an assignment. Insert the necessary statements before
658 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
659 is replaced. If the call is expected to produces a result, then it
660 is replaced by an assignment of the new RHS to the result variable.
661 If the result is to be ignored, then the call is replaced by a
662 GIMPLE_NOP. A proper VDEF chain is retained by making the first
663 VUSE and the last VDEF of the whole sequence be the same as the replaced
664 statement and using new SSA names for stores in between. */
666 void
667 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
669 tree lhs;
670 gimple stmt, new_stmt;
671 gimple_stmt_iterator i;
672 gimple_seq stmts = NULL;
674 stmt = gsi_stmt (*si_p);
676 gcc_assert (is_gimple_call (stmt));
678 push_gimplify_context (gimple_in_ssa_p (cfun));
680 lhs = gimple_call_lhs (stmt);
681 if (lhs == NULL_TREE)
683 gimplify_and_add (expr, &stmts);
684 /* We can end up with folding a memcpy of an empty class assignment
685 which gets optimized away by C++ gimplification. */
686 if (gimple_seq_empty_p (stmts))
688 pop_gimplify_context (NULL);
689 if (gimple_in_ssa_p (cfun))
691 unlink_stmt_vdef (stmt);
692 release_defs (stmt);
694 gsi_replace (si_p, gimple_build_nop (), true);
695 return;
698 else
700 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
701 new_stmt = gimple_build_assign (lhs, tmp);
702 i = gsi_last (stmts);
703 gsi_insert_after_without_update (&i, new_stmt,
704 GSI_CONTINUE_LINKING);
707 pop_gimplify_context (NULL);
709 gsi_replace_with_seq_vops (si_p, stmts);
713 /* Replace the call at *GSI with the gimple value VAL. */
715 static void
716 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
718 gimple stmt = gsi_stmt (*gsi);
719 tree lhs = gimple_call_lhs (stmt);
720 gimple repl;
721 if (lhs)
723 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
724 val = fold_convert (TREE_TYPE (lhs), val);
725 repl = gimple_build_assign (lhs, val);
727 else
728 repl = gimple_build_nop ();
729 tree vdef = gimple_vdef (stmt);
730 if (vdef && TREE_CODE (vdef) == SSA_NAME)
732 unlink_stmt_vdef (stmt);
733 release_ssa_name (vdef);
735 gsi_replace (gsi, repl, true);
738 /* Replace the call at *GSI with the new call REPL and fold that
739 again. */
741 static void
742 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
744 gimple stmt = gsi_stmt (*gsi);
745 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
746 gimple_set_location (repl, gimple_location (stmt));
747 if (gimple_vdef (stmt)
748 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
750 gimple_set_vdef (repl, gimple_vdef (stmt));
751 gimple_set_vuse (repl, gimple_vuse (stmt));
752 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
754 gsi_replace (gsi, repl, true);
755 fold_stmt (gsi);
758 /* Return true if VAR is a VAR_DECL or a component thereof. */
760 static bool
761 var_decl_component_p (tree var)
763 tree inner = var;
764 while (handled_component_p (inner))
765 inner = TREE_OPERAND (inner, 0);
766 return SSA_VAR_P (inner);
769 /* Fold function call to builtin mem{{,p}cpy,move}. Return
770 NULL_TREE if no simplification can be made.
771 If ENDP is 0, return DEST (like memcpy).
772 If ENDP is 1, return DEST+LEN (like mempcpy).
773 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
774 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
775 (memmove). */
777 static bool
778 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
779 tree dest, tree src, int endp)
781 gimple stmt = gsi_stmt (*gsi);
782 tree lhs = gimple_call_lhs (stmt);
783 tree len = gimple_call_arg (stmt, 2);
784 tree destvar, srcvar;
785 location_t loc = gimple_location (stmt);
787 /* If the LEN parameter is zero, return DEST. */
788 if (integer_zerop (len))
790 gimple repl;
791 if (gimple_call_lhs (stmt))
792 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
793 else
794 repl = gimple_build_nop ();
795 tree vdef = gimple_vdef (stmt);
796 if (vdef && TREE_CODE (vdef) == SSA_NAME)
798 unlink_stmt_vdef (stmt);
799 release_ssa_name (vdef);
801 gsi_replace (gsi, repl, true);
802 return true;
805 /* If SRC and DEST are the same (and not volatile), return
806 DEST{,+LEN,+LEN-1}. */
807 if (operand_equal_p (src, dest, 0))
809 unlink_stmt_vdef (stmt);
810 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
811 release_ssa_name (gimple_vdef (stmt));
812 if (!lhs)
814 gsi_replace (gsi, gimple_build_nop (), true);
815 return true;
817 goto done;
819 else
821 tree srctype, desttype;
822 unsigned int src_align, dest_align;
823 tree off0;
825 /* Build accesses at offset zero with a ref-all character type. */
826 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
827 ptr_mode, true), 0);
829 /* If we can perform the copy efficiently with first doing all loads
830 and then all stores inline it that way. Currently efficiently
831 means that we can load all the memory into a single integer
832 register which is what MOVE_MAX gives us. */
833 src_align = get_pointer_alignment (src);
834 dest_align = get_pointer_alignment (dest);
835 if (tree_fits_uhwi_p (len)
836 && compare_tree_int (len, MOVE_MAX) <= 0
837 /* ??? Don't transform copies from strings with known length this
838 confuses the tree-ssa-strlen.c. This doesn't handle
839 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
840 reason. */
841 && !c_strlen (src, 2))
843 unsigned ilen = tree_to_uhwi (len);
844 if (exact_log2 (ilen) != -1)
846 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
847 if (type
848 && TYPE_MODE (type) != BLKmode
849 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
850 == ilen * 8)
851 /* If the destination pointer is not aligned we must be able
852 to emit an unaligned store. */
853 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
854 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
856 tree srctype = type;
857 tree desttype = type;
858 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
859 srctype = build_aligned_type (type, src_align);
860 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
861 tree tem = fold_const_aggregate_ref (srcmem);
862 if (tem)
863 srcmem = tem;
864 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
865 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
866 src_align))
867 srcmem = NULL_TREE;
868 if (srcmem)
870 gimple new_stmt;
871 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
873 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
874 if (gimple_in_ssa_p (cfun))
875 srcmem = make_ssa_name (TREE_TYPE (srcmem),
876 new_stmt);
877 else
878 srcmem = create_tmp_reg (TREE_TYPE (srcmem),
879 NULL);
880 gimple_assign_set_lhs (new_stmt, srcmem);
881 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
882 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
884 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
885 desttype = build_aligned_type (type, dest_align);
886 new_stmt
887 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
888 dest, off0),
889 srcmem);
890 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
891 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
892 if (gimple_vdef (new_stmt)
893 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
894 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
895 if (!lhs)
897 gsi_replace (gsi, new_stmt, true);
898 return true;
900 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
901 goto done;
907 if (endp == 3)
909 /* Both DEST and SRC must be pointer types.
910 ??? This is what old code did. Is the testing for pointer types
911 really mandatory?
913 If either SRC is readonly or length is 1, we can use memcpy. */
914 if (!dest_align || !src_align)
915 return false;
916 if (readonly_data_expr (src)
917 || (tree_fits_uhwi_p (len)
918 && (MIN (src_align, dest_align) / BITS_PER_UNIT
919 >= tree_to_uhwi (len))))
921 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
922 if (!fn)
923 return false;
924 gimple_call_set_fndecl (stmt, fn);
925 gimple_call_set_arg (stmt, 0, dest);
926 gimple_call_set_arg (stmt, 1, src);
927 fold_stmt (gsi);
928 return true;
931 /* If *src and *dest can't overlap, optimize into memcpy as well. */
932 if (TREE_CODE (src) == ADDR_EXPR
933 && TREE_CODE (dest) == ADDR_EXPR)
935 tree src_base, dest_base, fn;
936 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
937 HOST_WIDE_INT size = -1;
938 HOST_WIDE_INT maxsize = -1;
940 srcvar = TREE_OPERAND (src, 0);
941 src_base = get_ref_base_and_extent (srcvar, &src_offset,
942 &size, &maxsize);
943 destvar = TREE_OPERAND (dest, 0);
944 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
945 &size, &maxsize);
946 if (tree_fits_uhwi_p (len))
947 maxsize = tree_to_uhwi (len);
948 else
949 maxsize = -1;
950 src_offset /= BITS_PER_UNIT;
951 dest_offset /= BITS_PER_UNIT;
952 if (SSA_VAR_P (src_base)
953 && SSA_VAR_P (dest_base))
955 if (operand_equal_p (src_base, dest_base, 0)
956 && ranges_overlap_p (src_offset, maxsize,
957 dest_offset, maxsize))
958 return false;
960 else if (TREE_CODE (src_base) == MEM_REF
961 && TREE_CODE (dest_base) == MEM_REF)
963 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
964 TREE_OPERAND (dest_base, 0), 0))
965 return false;
966 offset_int off = mem_ref_offset (src_base) + src_offset;
967 if (!wi::fits_shwi_p (off))
968 return false;
969 src_offset = off.to_shwi ();
971 off = mem_ref_offset (dest_base) + dest_offset;
972 if (!wi::fits_shwi_p (off))
973 return false;
974 dest_offset = off.to_shwi ();
975 if (ranges_overlap_p (src_offset, maxsize,
976 dest_offset, maxsize))
977 return false;
979 else
980 return false;
982 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
983 if (!fn)
984 return false;
985 gimple_call_set_fndecl (stmt, fn);
986 gimple_call_set_arg (stmt, 0, dest);
987 gimple_call_set_arg (stmt, 1, src);
988 fold_stmt (gsi);
989 return true;
992 /* If the destination and source do not alias optimize into
993 memcpy as well. */
994 if ((is_gimple_min_invariant (dest)
995 || TREE_CODE (dest) == SSA_NAME)
996 && (is_gimple_min_invariant (src)
997 || TREE_CODE (src) == SSA_NAME))
999 ao_ref destr, srcr;
1000 ao_ref_init_from_ptr_and_size (&destr, dest, len);
1001 ao_ref_init_from_ptr_and_size (&srcr, src, len);
1002 if (!refs_may_alias_p_1 (&destr, &srcr, false))
1004 tree fn;
1005 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1006 if (!fn)
1007 return false;
1008 gimple_call_set_fndecl (stmt, fn);
1009 gimple_call_set_arg (stmt, 0, dest);
1010 gimple_call_set_arg (stmt, 1, src);
1011 fold_stmt (gsi);
1012 return true;
1016 return false;
1019 if (!tree_fits_shwi_p (len))
1020 return false;
1021 /* FIXME:
1022 This logic lose for arguments like (type *)malloc (sizeof (type)),
1023 since we strip the casts of up to VOID return value from malloc.
1024 Perhaps we ought to inherit type from non-VOID argument here? */
1025 STRIP_NOPS (src);
1026 STRIP_NOPS (dest);
1027 if (!POINTER_TYPE_P (TREE_TYPE (src))
1028 || !POINTER_TYPE_P (TREE_TYPE (dest)))
1029 return false;
1030 /* In the following try to find a type that is most natural to be
1031 used for the memcpy source and destination and that allows
1032 the most optimization when memcpy is turned into a plain assignment
1033 using that type. In theory we could always use a char[len] type
1034 but that only gains us that the destination and source possibly
1035 no longer will have their address taken. */
1036 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1037 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1039 tree tem = TREE_OPERAND (src, 0);
1040 STRIP_NOPS (tem);
1041 if (tem != TREE_OPERAND (src, 0))
1042 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1044 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1046 tree tem = TREE_OPERAND (dest, 0);
1047 STRIP_NOPS (tem);
1048 if (tem != TREE_OPERAND (dest, 0))
1049 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1051 srctype = TREE_TYPE (TREE_TYPE (src));
1052 if (TREE_CODE (srctype) == ARRAY_TYPE
1053 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1055 srctype = TREE_TYPE (srctype);
1056 STRIP_NOPS (src);
1057 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1059 desttype = TREE_TYPE (TREE_TYPE (dest));
1060 if (TREE_CODE (desttype) == ARRAY_TYPE
1061 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1063 desttype = TREE_TYPE (desttype);
1064 STRIP_NOPS (dest);
1065 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1067 if (TREE_ADDRESSABLE (srctype)
1068 || TREE_ADDRESSABLE (desttype))
1069 return false;
1071 /* Make sure we are not copying using a floating-point mode or
1072 a type whose size possibly does not match its precision. */
1073 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1074 || TREE_CODE (desttype) == BOOLEAN_TYPE
1075 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1076 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1077 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1078 || TREE_CODE (srctype) == BOOLEAN_TYPE
1079 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1080 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1081 if (!srctype)
1082 srctype = desttype;
1083 if (!desttype)
1084 desttype = srctype;
1085 if (!srctype)
1086 return false;
1088 src_align = get_pointer_alignment (src);
1089 dest_align = get_pointer_alignment (dest);
1090 if (dest_align < TYPE_ALIGN (desttype)
1091 || src_align < TYPE_ALIGN (srctype))
1092 return false;
1094 destvar = dest;
1095 STRIP_NOPS (destvar);
1096 if (TREE_CODE (destvar) == ADDR_EXPR
1097 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1098 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1099 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1100 else
1101 destvar = NULL_TREE;
1103 srcvar = src;
1104 STRIP_NOPS (srcvar);
1105 if (TREE_CODE (srcvar) == ADDR_EXPR
1106 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1107 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1109 if (!destvar
1110 || src_align >= TYPE_ALIGN (desttype))
1111 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1112 srcvar, off0);
1113 else if (!STRICT_ALIGNMENT)
1115 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1116 src_align);
1117 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1119 else
1120 srcvar = NULL_TREE;
1122 else
1123 srcvar = NULL_TREE;
1125 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1126 return false;
1128 if (srcvar == NULL_TREE)
1130 STRIP_NOPS (src);
1131 if (src_align >= TYPE_ALIGN (desttype))
1132 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1133 else
1135 if (STRICT_ALIGNMENT)
1136 return false;
1137 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1138 src_align);
1139 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1142 else if (destvar == NULL_TREE)
1144 STRIP_NOPS (dest);
1145 if (dest_align >= TYPE_ALIGN (srctype))
1146 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1147 else
1149 if (STRICT_ALIGNMENT)
1150 return false;
1151 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1152 dest_align);
1153 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1157 gimple new_stmt;
1158 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1160 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1161 if (gimple_in_ssa_p (cfun))
1162 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1163 else
1164 srcvar = create_tmp_reg (TREE_TYPE (srcvar), NULL);
1165 gimple_assign_set_lhs (new_stmt, srcvar);
1166 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1167 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1169 new_stmt = gimple_build_assign (destvar, srcvar);
1170 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1171 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1172 if (gimple_vdef (new_stmt)
1173 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1174 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1175 if (!lhs)
1177 gsi_replace (gsi, new_stmt, true);
1178 return true;
1180 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1183 done:
1184 if (endp == 0 || endp == 3)
1185 len = NULL_TREE;
1186 else if (endp == 2)
1187 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1188 ssize_int (1));
1189 if (endp == 2 || endp == 1)
1190 dest = fold_build_pointer_plus_loc (loc, dest, len);
1192 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1193 GSI_SAME_STMT);
1194 gimple repl = gimple_build_assign (lhs, dest);
1195 gsi_replace (gsi, repl, true);
1196 return true;
1199 /* Fold function call to builtin memset or bzero at *GSI setting the
1200 memory of size LEN to VAL. Return whether a simplification was made. */
1202 static bool
1203 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1205 gimple stmt = gsi_stmt (*gsi);
1206 tree etype;
1207 unsigned HOST_WIDE_INT length, cval;
1209 /* If the LEN parameter is zero, return DEST. */
1210 if (integer_zerop (len))
1212 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1213 return true;
1216 if (! tree_fits_uhwi_p (len))
1217 return false;
1219 if (TREE_CODE (c) != INTEGER_CST)
1220 return false;
1222 tree dest = gimple_call_arg (stmt, 0);
1223 tree var = dest;
1224 if (TREE_CODE (var) != ADDR_EXPR)
1225 return false;
1227 var = TREE_OPERAND (var, 0);
1228 if (TREE_THIS_VOLATILE (var))
1229 return false;
1231 etype = TREE_TYPE (var);
1232 if (TREE_CODE (etype) == ARRAY_TYPE)
1233 etype = TREE_TYPE (etype);
1235 if (!INTEGRAL_TYPE_P (etype)
1236 && !POINTER_TYPE_P (etype))
1237 return NULL_TREE;
1239 if (! var_decl_component_p (var))
1240 return NULL_TREE;
1242 length = tree_to_uhwi (len);
1243 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1244 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1245 return NULL_TREE;
1247 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1248 return NULL_TREE;
1250 if (integer_zerop (c))
1251 cval = 0;
1252 else
1254 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1255 return NULL_TREE;
1257 cval = TREE_INT_CST_LOW (c);
1258 cval &= 0xff;
1259 cval |= cval << 8;
1260 cval |= cval << 16;
1261 cval |= (cval << 31) << 1;
1264 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1265 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1266 gimple_set_vuse (store, gimple_vuse (stmt));
1267 tree vdef = gimple_vdef (stmt);
1268 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1270 gimple_set_vdef (store, gimple_vdef (stmt));
1271 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1273 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1274 if (gimple_call_lhs (stmt))
1276 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1277 gsi_replace (gsi, asgn, true);
1279 else
1281 gimple_stmt_iterator gsi2 = *gsi;
1282 gsi_prev (gsi);
1283 gsi_remove (&gsi2, true);
1286 return true;
1290 /* Return the string length, maximum string length or maximum value of
1291 ARG in LENGTH.
1292 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1293 is not NULL and, for TYPE == 0, its value is not equal to the length
1294 we determine or if we are unable to determine the length or value,
1295 return false. VISITED is a bitmap of visited variables.
1296 TYPE is 0 if string length should be returned, 1 for maximum string
1297 length and 2 for maximum value ARG can have. */
1299 static bool
1300 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1302 tree var, val;
1303 gimple def_stmt;
1305 if (TREE_CODE (arg) != SSA_NAME)
1307 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1308 if (TREE_CODE (arg) == ADDR_EXPR
1309 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1310 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1312 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1313 if (TREE_CODE (aop0) == INDIRECT_REF
1314 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1315 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1316 length, visited, type);
1319 if (type == 2)
1321 val = arg;
1322 if (TREE_CODE (val) != INTEGER_CST
1323 || tree_int_cst_sgn (val) < 0)
1324 return false;
1326 else
1327 val = c_strlen (arg, 1);
1328 if (!val)
1329 return false;
1331 if (*length)
1333 if (type > 0)
1335 if (TREE_CODE (*length) != INTEGER_CST
1336 || TREE_CODE (val) != INTEGER_CST)
1337 return false;
1339 if (tree_int_cst_lt (*length, val))
1340 *length = val;
1341 return true;
1343 else if (simple_cst_equal (val, *length) != 1)
1344 return false;
1347 *length = val;
1348 return true;
1351 /* If ARG is registered for SSA update we cannot look at its defining
1352 statement. */
1353 if (name_registered_for_update_p (arg))
1354 return false;
1356 /* If we were already here, break the infinite cycle. */
1357 if (!*visited)
1358 *visited = BITMAP_ALLOC (NULL);
1359 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1360 return true;
1362 var = arg;
1363 def_stmt = SSA_NAME_DEF_STMT (var);
1365 switch (gimple_code (def_stmt))
1367 case GIMPLE_ASSIGN:
1368 /* The RHS of the statement defining VAR must either have a
1369 constant length or come from another SSA_NAME with a constant
1370 length. */
1371 if (gimple_assign_single_p (def_stmt)
1372 || gimple_assign_unary_nop_p (def_stmt))
1374 tree rhs = gimple_assign_rhs1 (def_stmt);
1375 return get_maxval_strlen (rhs, length, visited, type);
1377 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1379 tree op2 = gimple_assign_rhs2 (def_stmt);
1380 tree op3 = gimple_assign_rhs3 (def_stmt);
1381 return get_maxval_strlen (op2, length, visited, type)
1382 && get_maxval_strlen (op3, length, visited, type);
1384 return false;
1386 case GIMPLE_PHI:
1388 /* All the arguments of the PHI node must have the same constant
1389 length. */
1390 unsigned i;
1392 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1394 tree arg = gimple_phi_arg (def_stmt, i)->def;
1396 /* If this PHI has itself as an argument, we cannot
1397 determine the string length of this argument. However,
1398 if we can find a constant string length for the other
1399 PHI args then we can still be sure that this is a
1400 constant string length. So be optimistic and just
1401 continue with the next argument. */
1402 if (arg == gimple_phi_result (def_stmt))
1403 continue;
1405 if (!get_maxval_strlen (arg, length, visited, type))
1406 return false;
1409 return true;
1411 default:
1412 return false;
1416 tree
1417 get_maxval_strlen (tree arg, int type)
1419 bitmap visited = NULL;
1420 tree len = NULL_TREE;
1421 if (!get_maxval_strlen (arg, &len, &visited, type))
1422 len = NULL_TREE;
1423 if (visited)
1424 BITMAP_FREE (visited);
1426 return len;
1430 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1431 If LEN is not NULL, it represents the length of the string to be
1432 copied. Return NULL_TREE if no simplification can be made. */
1434 static bool
1435 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1436 tree dest, tree src)
1438 location_t loc = gimple_location (gsi_stmt (*gsi));
1439 tree fn;
1441 /* If SRC and DEST are the same (and not volatile), return DEST. */
1442 if (operand_equal_p (src, dest, 0))
1444 replace_call_with_value (gsi, dest);
1445 return true;
1448 if (optimize_function_for_size_p (cfun))
1449 return false;
1451 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1452 if (!fn)
1453 return false;
1455 tree len = get_maxval_strlen (src, 0);
1456 if (!len)
1457 return false;
1459 len = fold_convert_loc (loc, size_type_node, len);
1460 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1461 len = force_gimple_operand_gsi (gsi, len, true,
1462 NULL_TREE, true, GSI_SAME_STMT);
1463 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1464 replace_call_with_call_and_fold (gsi, repl);
1465 return true;
1468 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1469 If SLEN is not NULL, it represents the length of the source string.
1470 Return NULL_TREE if no simplification can be made. */
1472 static bool
1473 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1474 tree dest, tree src, tree len)
1476 location_t loc = gimple_location (gsi_stmt (*gsi));
1477 tree fn;
1479 /* If the LEN parameter is zero, return DEST. */
1480 if (integer_zerop (len))
1482 replace_call_with_value (gsi, dest);
1483 return true;
1486 /* We can't compare slen with len as constants below if len is not a
1487 constant. */
1488 if (TREE_CODE (len) != INTEGER_CST)
1489 return false;
1491 /* Now, we must be passed a constant src ptr parameter. */
1492 tree slen = get_maxval_strlen (src, 0);
1493 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1494 return false;
1496 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1498 /* We do not support simplification of this case, though we do
1499 support it when expanding trees into RTL. */
1500 /* FIXME: generate a call to __builtin_memset. */
1501 if (tree_int_cst_lt (slen, len))
1502 return false;
1504 /* OK transform into builtin memcpy. */
1505 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1506 if (!fn)
1507 return false;
1509 len = fold_convert_loc (loc, size_type_node, len);
1510 len = force_gimple_operand_gsi (gsi, len, true,
1511 NULL_TREE, true, GSI_SAME_STMT);
1512 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1513 replace_call_with_call_and_fold (gsi, repl);
1514 return true;
1517 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1518 to the call.
1520 Return NULL_TREE if no simplification was possible, otherwise return the
1521 simplified form of the call as a tree.
1523 The simplified form may be a constant or other expression which
1524 computes the same value, but in a more efficient manner (including
1525 calls to other builtin functions).
1527 The call may contain arguments which need to be evaluated, but
1528 which are not useful to determine the result of the call. In
1529 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1530 COMPOUND_EXPR will be an argument which must be evaluated.
1531 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1532 COMPOUND_EXPR in the chain will contain the tree for the simplified
1533 form of the builtin function call. */
1535 static bool
1536 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1538 gimple stmt = gsi_stmt (*gsi);
1539 location_t loc = gimple_location (stmt);
1541 const char *p = c_getstr (src);
1543 /* If the string length is zero, return the dst parameter. */
1544 if (p && *p == '\0')
1546 replace_call_with_value (gsi, dst);
1547 return true;
1550 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1551 return false;
1553 /* See if we can store by pieces into (dst + strlen(dst)). */
1554 tree newdst;
1555 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1556 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1558 if (!strlen_fn || !memcpy_fn)
1559 return false;
1561 /* If the length of the source string isn't computable don't
1562 split strcat into strlen and memcpy. */
1563 tree len = get_maxval_strlen (src, 0);
1564 if (! len)
1565 return false;
1567 /* Create strlen (dst). */
1568 gimple_seq stmts = NULL, stmts2;
1569 gimple repl = gimple_build_call (strlen_fn, 1, dst);
1570 gimple_set_location (repl, loc);
1571 if (gimple_in_ssa_p (cfun))
1572 newdst = make_ssa_name (size_type_node, NULL);
1573 else
1574 newdst = create_tmp_reg (size_type_node, NULL);
1575 gimple_call_set_lhs (repl, newdst);
1576 gimple_seq_add_stmt_without_update (&stmts, repl);
1578 /* Create (dst p+ strlen (dst)). */
1579 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1580 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1581 gimple_seq_add_seq_without_update (&stmts, stmts2);
1583 len = fold_convert_loc (loc, size_type_node, len);
1584 len = size_binop_loc (loc, PLUS_EXPR, len,
1585 build_int_cst (size_type_node, 1));
1586 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1587 gimple_seq_add_seq_without_update (&stmts, stmts2);
1589 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1590 gimple_seq_add_stmt_without_update (&stmts, repl);
1591 if (gimple_call_lhs (stmt))
1593 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1594 gimple_seq_add_stmt_without_update (&stmts, repl);
1595 gsi_replace_with_seq_vops (gsi, stmts);
1596 /* gsi now points at the assignment to the lhs, get a
1597 stmt iterator to the memcpy call.
1598 ??? We can't use gsi_for_stmt as that doesn't work when the
1599 CFG isn't built yet. */
1600 gimple_stmt_iterator gsi2 = *gsi;
1601 gsi_prev (&gsi2);
1602 fold_stmt (&gsi2);
1604 else
1606 gsi_replace_with_seq_vops (gsi, stmts);
1607 fold_stmt (gsi);
1609 return true;
1612 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1613 are the arguments to the call. */
1615 static bool
1616 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1618 gimple stmt = gsi_stmt (*gsi);
1619 tree dest = gimple_call_arg (stmt, 0);
1620 tree src = gimple_call_arg (stmt, 1);
1621 tree size = gimple_call_arg (stmt, 2);
1622 tree fn;
1623 const char *p;
1626 p = c_getstr (src);
1627 /* If the SRC parameter is "", return DEST. */
1628 if (p && *p == '\0')
1630 replace_call_with_value (gsi, dest);
1631 return true;
1634 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1635 return false;
1637 /* If __builtin_strcat_chk is used, assume strcat is available. */
1638 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1639 if (!fn)
1640 return false;
1642 gimple repl = gimple_build_call (fn, 2, dest, src);
1643 replace_call_with_call_and_fold (gsi, repl);
1644 return true;
1647 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1648 LEN, and SIZE. */
1650 static bool
1651 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1653 gimple stmt = gsi_stmt (*gsi);
1654 tree dest = gimple_call_arg (stmt, 0);
1655 tree src = gimple_call_arg (stmt, 1);
1656 tree len = gimple_call_arg (stmt, 2);
1657 tree size = gimple_call_arg (stmt, 3);
1658 tree fn;
1659 const char *p;
1661 p = c_getstr (src);
1662 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1663 if ((p && *p == '\0')
1664 || integer_zerop (len))
1666 replace_call_with_value (gsi, dest);
1667 return true;
1670 if (! tree_fits_uhwi_p (size))
1671 return false;
1673 if (! integer_all_onesp (size))
1675 tree src_len = c_strlen (src, 1);
1676 if (src_len
1677 && tree_fits_uhwi_p (src_len)
1678 && tree_fits_uhwi_p (len)
1679 && ! tree_int_cst_lt (len, src_len))
1681 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1682 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1683 if (!fn)
1684 return false;
1686 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1687 replace_call_with_call_and_fold (gsi, repl);
1688 return true;
1690 return false;
1693 /* If __builtin_strncat_chk is used, assume strncat is available. */
1694 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1695 if (!fn)
1696 return false;
1698 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1699 replace_call_with_call_and_fold (gsi, repl);
1700 return true;
1703 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1704 to the call. IGNORE is true if the value returned
1705 by the builtin will be ignored. UNLOCKED is true is true if this
1706 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1707 the known length of the string. Return NULL_TREE if no simplification
1708 was possible. */
1710 static bool
1711 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1712 tree arg0, tree arg1,
1713 bool unlocked)
1715 gimple stmt = gsi_stmt (*gsi);
1717 /* If we're using an unlocked function, assume the other unlocked
1718 functions exist explicitly. */
1719 tree const fn_fputc = (unlocked
1720 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1721 : builtin_decl_implicit (BUILT_IN_FPUTC));
1722 tree const fn_fwrite = (unlocked
1723 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1724 : builtin_decl_implicit (BUILT_IN_FWRITE));
1726 /* If the return value is used, don't do the transformation. */
1727 if (gimple_call_lhs (stmt))
1728 return false;
1730 /* Get the length of the string passed to fputs. If the length
1731 can't be determined, punt. */
1732 tree len = get_maxval_strlen (arg0, 0);
1733 if (!len
1734 || TREE_CODE (len) != INTEGER_CST)
1735 return false;
1737 switch (compare_tree_int (len, 1))
1739 case -1: /* length is 0, delete the call entirely . */
1740 replace_call_with_value (gsi, integer_zero_node);
1741 return true;
1743 case 0: /* length is 1, call fputc. */
1745 const char *p = c_getstr (arg0);
1746 if (p != NULL)
1748 if (!fn_fputc)
1749 return false;
1751 gimple repl = gimple_build_call (fn_fputc, 2,
1752 build_int_cst
1753 (integer_type_node, p[0]), arg1);
1754 replace_call_with_call_and_fold (gsi, repl);
1755 return true;
1758 /* FALLTHROUGH */
1759 case 1: /* length is greater than 1, call fwrite. */
1761 /* If optimizing for size keep fputs. */
1762 if (optimize_function_for_size_p (cfun))
1763 return false;
1764 /* New argument list transforming fputs(string, stream) to
1765 fwrite(string, 1, len, stream). */
1766 if (!fn_fwrite)
1767 return false;
1769 gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
1770 size_one_node, len, arg1);
1771 replace_call_with_call_and_fold (gsi, repl);
1772 return true;
1774 default:
1775 gcc_unreachable ();
1777 return false;
1780 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1781 DEST, SRC, LEN, and SIZE are the arguments to the call.
1782 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1783 code of the builtin. If MAXLEN is not NULL, it is maximum length
1784 passed as third argument. */
1786 static bool
1787 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1788 tree dest, tree src, tree len, tree size,
1789 enum built_in_function fcode)
1791 gimple stmt = gsi_stmt (*gsi);
1792 location_t loc = gimple_location (stmt);
1793 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1794 tree fn;
1796 /* If SRC and DEST are the same (and not volatile), return DEST
1797 (resp. DEST+LEN for __mempcpy_chk). */
1798 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1800 if (fcode != BUILT_IN_MEMPCPY_CHK)
1802 replace_call_with_value (gsi, dest);
1803 return true;
1805 else
1807 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
1808 temp = force_gimple_operand_gsi (gsi, temp,
1809 false, NULL_TREE, true,
1810 GSI_SAME_STMT);
1811 replace_call_with_value (gsi, temp);
1812 return true;
1816 if (! tree_fits_uhwi_p (size))
1817 return false;
1819 tree maxlen = get_maxval_strlen (len, 2);
1820 if (! integer_all_onesp (size))
1822 if (! tree_fits_uhwi_p (len))
1824 /* If LEN is not constant, try MAXLEN too.
1825 For MAXLEN only allow optimizing into non-_ocs function
1826 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1827 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1829 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1831 /* (void) __mempcpy_chk () can be optimized into
1832 (void) __memcpy_chk (). */
1833 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1834 if (!fn)
1835 return false;
1837 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1838 replace_call_with_call_and_fold (gsi, repl);
1839 return true;
1841 return false;
1844 else
1845 maxlen = len;
1847 if (tree_int_cst_lt (size, maxlen))
1848 return false;
1851 fn = NULL_TREE;
1852 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1853 mem{cpy,pcpy,move,set} is available. */
1854 switch (fcode)
1856 case BUILT_IN_MEMCPY_CHK:
1857 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1858 break;
1859 case BUILT_IN_MEMPCPY_CHK:
1860 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1861 break;
1862 case BUILT_IN_MEMMOVE_CHK:
1863 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1864 break;
1865 case BUILT_IN_MEMSET_CHK:
1866 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1867 break;
1868 default:
1869 break;
1872 if (!fn)
1873 return false;
1875 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1876 replace_call_with_call_and_fold (gsi, repl);
1877 return true;
1880 /* Fold a call to the __st[rp]cpy_chk builtin.
1881 DEST, SRC, and SIZE are the arguments to the call.
1882 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1883 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1884 strings passed as second argument. */
1886 static bool
1887 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1888 tree dest,
1889 tree src, tree size,
1890 enum built_in_function fcode)
1892 gimple stmt = gsi_stmt (*gsi);
1893 location_t loc = gimple_location (stmt);
1894 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1895 tree len, fn;
1897 /* If SRC and DEST are the same (and not volatile), return DEST. */
1898 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1900 replace_call_with_value (gsi, dest);
1901 return true;
1904 if (! tree_fits_uhwi_p (size))
1905 return false;
1907 tree maxlen = get_maxval_strlen (src, 1);
1908 if (! integer_all_onesp (size))
1910 len = c_strlen (src, 1);
1911 if (! len || ! tree_fits_uhwi_p (len))
1913 /* If LEN is not constant, try MAXLEN too.
1914 For MAXLEN only allow optimizing into non-_ocs function
1915 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1916 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1918 if (fcode == BUILT_IN_STPCPY_CHK)
1920 if (! ignore)
1921 return false;
1923 /* If return value of __stpcpy_chk is ignored,
1924 optimize into __strcpy_chk. */
1925 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1926 if (!fn)
1927 return false;
1929 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1930 replace_call_with_call_and_fold (gsi, repl);
1931 return true;
1934 if (! len || TREE_SIDE_EFFECTS (len))
1935 return false;
1937 /* If c_strlen returned something, but not a constant,
1938 transform __strcpy_chk into __memcpy_chk. */
1939 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1940 if (!fn)
1941 return false;
1943 len = fold_convert_loc (loc, size_type_node, len);
1944 len = size_binop_loc (loc, PLUS_EXPR, len,
1945 build_int_cst (size_type_node, 1));
1946 len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
1947 true, GSI_SAME_STMT);
1948 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1949 replace_call_with_call_and_fold (gsi, repl);
1950 return true;
1953 else
1954 maxlen = len;
1956 if (! tree_int_cst_lt (maxlen, size))
1957 return false;
1960 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1961 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1962 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1963 if (!fn)
1964 return false;
1966 gimple repl = gimple_build_call (fn, 2, dest, src);
1967 replace_call_with_call_and_fold (gsi, repl);
1968 return true;
1971 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1972 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1973 length passed as third argument. IGNORE is true if return value can be
1974 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1976 static bool
1977 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1978 tree dest, tree src,
1979 tree len, tree size,
1980 enum built_in_function fcode)
1982 gimple stmt = gsi_stmt (*gsi);
1983 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1984 tree fn;
1986 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1988 /* If return value of __stpncpy_chk is ignored,
1989 optimize into __strncpy_chk. */
1990 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1991 if (fn)
1993 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1994 replace_call_with_call_and_fold (gsi, repl);
1995 return true;
1999 if (! tree_fits_uhwi_p (size))
2000 return false;
2002 tree maxlen = get_maxval_strlen (len, 2);
2003 if (! integer_all_onesp (size))
2005 if (! tree_fits_uhwi_p (len))
2007 /* If LEN is not constant, try MAXLEN too.
2008 For MAXLEN only allow optimizing into non-_ocs function
2009 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2010 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2011 return false;
2013 else
2014 maxlen = len;
2016 if (tree_int_cst_lt (size, maxlen))
2017 return false;
2020 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2021 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2022 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2023 if (!fn)
2024 return false;
2026 gimple repl = gimple_build_call (fn, 3, dest, src, len);
2027 replace_call_with_call_and_fold (gsi, repl);
2028 return true;
2031 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2032 NULL_TREE if a normal call should be emitted rather than expanding
2033 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2034 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2035 passed as second argument. */
2037 static bool
2038 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2039 enum built_in_function fcode)
2041 gimple stmt = gsi_stmt (*gsi);
2042 tree dest, size, len, fn, fmt, flag;
2043 const char *fmt_str;
2045 /* Verify the required arguments in the original call. */
2046 if (gimple_call_num_args (stmt) < 5)
2047 return false;
2049 dest = gimple_call_arg (stmt, 0);
2050 len = gimple_call_arg (stmt, 1);
2051 flag = gimple_call_arg (stmt, 2);
2052 size = gimple_call_arg (stmt, 3);
2053 fmt = gimple_call_arg (stmt, 4);
2055 if (! tree_fits_uhwi_p (size))
2056 return false;
2058 if (! integer_all_onesp (size))
2060 tree maxlen = get_maxval_strlen (len, 2);
2061 if (! tree_fits_uhwi_p (len))
2063 /* If LEN is not constant, try MAXLEN too.
2064 For MAXLEN only allow optimizing into non-_ocs function
2065 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2066 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2067 return false;
2069 else
2070 maxlen = len;
2072 if (tree_int_cst_lt (size, maxlen))
2073 return false;
2076 if (!init_target_chars ())
2077 return false;
2079 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2080 or if format doesn't contain % chars or is "%s". */
2081 if (! integer_zerop (flag))
2083 fmt_str = c_getstr (fmt);
2084 if (fmt_str == NULL)
2085 return false;
2086 if (strchr (fmt_str, target_percent) != NULL
2087 && strcmp (fmt_str, target_percent_s))
2088 return false;
2091 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2092 available. */
2093 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2094 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2095 if (!fn)
2096 return false;
2098 /* Replace the called function and the first 5 argument by 3 retaining
2099 trailing varargs. */
2100 gimple_call_set_fndecl (stmt, fn);
2101 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2102 gimple_call_set_arg (stmt, 0, dest);
2103 gimple_call_set_arg (stmt, 1, len);
2104 gimple_call_set_arg (stmt, 2, fmt);
2105 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2106 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2107 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2108 fold_stmt (gsi);
2109 return true;
2112 /* Fold a call to __{,v}sprintf_chk at *GSI into a plain {,v}sprintf
2113 call when the object-size check can be proven unnecessary. Return
2114 true if the statement was simplified, false if a normal call should
2115 be emitted instead. FCODE is either BUILT_IN_SPRINTF_CHK
 or BUILT_IN_VSPRINTF_CHK. */
2117 static bool
2118 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2119 enum built_in_function fcode)
2121 gimple stmt = gsi_stmt (*gsi);
2122 tree dest, size, len, fn, fmt, flag;
2123 const char *fmt_str;
2124 unsigned nargs = gimple_call_num_args (stmt);
2126 /* Verify the required arguments in the original call. */
2127 if (nargs < 4)
2128 return false;
2129 dest = gimple_call_arg (stmt, 0);
2130 flag = gimple_call_arg (stmt, 1);
2131 size = gimple_call_arg (stmt, 2);
2132 fmt = gimple_call_arg (stmt, 3);
 /* SIZE must be a compile-time constant to reason about overflow.  */
2134 if (! tree_fits_uhwi_p (size))
2135 return false;
2137 len = NULL_TREE;
2139 if (!init_target_chars ())
2140 return false;
2142 /* Check whether the format is a literal string constant.  */
2143 fmt_str = c_getstr (fmt);
2144 if (fmt_str != NULL)
2146 /* If the format doesn't contain % args or %%, we know the size. */
2147 if (strchr (fmt_str, target_percent) == 0)
 /* For sprintf_chk only a 4-argument (no-vararg) call has a
    statically known output length; vsprintf_chk always does.  */
2149 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2150 len = build_int_cstu (size_type_node, strlen (fmt_str));
2152 /* If the format is "%s" and first ... argument is a string literal,
2153 we know the size too. */
2154 else if (fcode == BUILT_IN_SPRINTF_CHK
2155 && strcmp (fmt_str, target_percent_s) == 0)
2157 tree arg;
2159 if (nargs == 5)
2161 arg = gimple_call_arg (stmt, 4);
2162 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2164 len = c_strlen (arg, 1);
2165 if (! len || ! tree_fits_uhwi_p (len))
2166 len = NULL_TREE;
 /* SIZE == -1 (all ones) means the object size is unknown, in which
    case the check is a no-op; otherwise require LEN < SIZE.  */
2172 if (! integer_all_onesp (size))
2174 if (! len || ! tree_int_cst_lt (len, size))
2175 return false;
2178 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2179 or if format doesn't contain % chars or is "%s". */
2180 if (! integer_zerop (flag))
2182 if (fmt_str == NULL)
2183 return false;
2184 if (strchr (fmt_str, target_percent) != NULL
2185 && strcmp (fmt_str, target_percent_s))
2186 return false;
2189 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2190 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2191 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2192 if (!fn)
2193 return false;
2195 /* Replace the called function and the first 4 argument by 2 retaining
2196 trailing varargs. */
2197 gimple_call_set_fndecl (stmt, fn);
2198 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2199 gimple_call_set_arg (stmt, 0, dest);
2200 gimple_call_set_arg (stmt, 1, fmt);
 /* Shift the varargs left over the dropped FLAG and SIZE operands.  */
2201 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2202 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2))
2203 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2204 fold_stmt (gsi);
2205 return true;
2208 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2209 ORIG may be null if this is a 2-argument call. We don't attempt to
2210 simplify calls with more than 3 arguments.
2212 Return false if no simplification was possible, otherwise replace the
2213 call at *GSI (with strcpy plus, if the result is used, an assignment
2214 of the known length to the lhs) and return true. */
2216 static bool
2217 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2219 gimple stmt = gsi_stmt (*gsi);
2220 tree dest = gimple_call_arg (stmt, 0);
2221 tree fmt = gimple_call_arg (stmt, 1);
2222 tree orig = NULL_TREE;
2223 const char *fmt_str = NULL;
2225 /* Verify the required arguments in the original call. We deal with two
2226 types of sprintf() calls: 'sprintf (str, fmt)' and
2227 'sprintf (dest, "%s", orig)'. */
2228 if (gimple_call_num_args (stmt) > 3)
2229 return false;
2231 if (gimple_call_num_args (stmt) == 3)
2232 orig = gimple_call_arg (stmt, 2);
2234 /* Check whether the format is a literal string constant. */
2235 fmt_str = c_getstr (fmt);
2236 if (fmt_str == NULL)
2237 return false;
2239 if (!init_target_chars ())
2240 return false;
2242 /* If the format doesn't contain % args or %%, use strcpy. */
2243 if (strchr (fmt_str, target_percent) == NULL)
2245 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2247 if (!fn)
2248 return false;
2250 /* Don't optimize sprintf (buf, "abc", ptr++). */
2251 if (orig)
2252 return false;
2254 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2255 'format' is known to contain no % formats. */
2256 gimple_seq stmts = NULL;
2257 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2258 gimple_seq_add_stmt_without_update (&stmts, repl);
2259 if (gimple_call_lhs (stmt))
 /* The caller uses sprintf's return value: it equals the length of
    the constant format string.  */
2261 repl = gimple_build_assign (gimple_call_lhs (stmt),
2262 build_int_cst (integer_type_node,
2263 strlen (fmt_str)));
2264 gimple_seq_add_stmt_without_update (&stmts, repl);
2265 gsi_replace_with_seq_vops (gsi, stmts);
2266 /* gsi now points at the assignment to the lhs, get a
2267 stmt iterator to the memcpy call.
2268 ??? We can't use gsi_for_stmt as that doesn't work when the
2269 CFG isn't built yet. */
2270 gimple_stmt_iterator gsi2 = *gsi;
2271 gsi_prev (&gsi2);
2272 fold_stmt (&gsi2);
2274 else
2276 gsi_replace_with_seq_vops (gsi, stmts);
2277 fold_stmt (gsi);
2279 return true;
2282 /* If the format is "%s", use strcpy if the result isn't used. */
2283 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2285 tree fn;
2286 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2288 if (!fn)
2289 return false;
2291 /* Don't crash on sprintf (str1, "%s"). */
2292 if (!orig)
2293 return false;
 /* When the result is used we additionally need the (maximum known)
    length of ORIG to materialize the return value.  */
2295 tree orig_len = NULL_TREE;
2296 if (gimple_call_lhs (stmt))
2298 orig_len = get_maxval_strlen (orig, 0);
2299 if (!orig_len)
2300 return false;
2303 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2304 gimple_seq stmts = NULL;
2305 gimple repl = gimple_build_call (fn, 2, dest, orig);
2306 gimple_seq_add_stmt_without_update (&stmts, repl);
2307 if (gimple_call_lhs (stmt))
2309 if (!useless_type_conversion_p (integer_type_node,
2310 TREE_TYPE (orig_len)))
2311 orig_len = fold_convert (integer_type_node, orig_len);
2312 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2313 gimple_seq_add_stmt_without_update (&stmts, repl);
2314 gsi_replace_with_seq_vops (gsi, stmts);
2315 /* gsi now points at the assignment to the lhs, get a
2316 stmt iterator to the memcpy call.
2317 ??? We can't use gsi_for_stmt as that doesn't work when the
2318 CFG isn't built yet. */
2319 gimple_stmt_iterator gsi2 = *gsi;
2320 gsi_prev (&gsi2);
2321 fold_stmt (&gsi2);
2323 else
2325 gsi_replace_with_seq_vops (gsi, stmts);
2326 fold_stmt (gsi);
2328 return true;
2330 return false;
2333 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2334 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2335 attempt to simplify calls with more than 4 arguments.
2337 Return false if no simplification was possible, otherwise replace the
2338 call at *GSI (with strcpy plus, if the result is used, an assignment
2339 of the known length to the lhs) and return true. */
2341 static bool
2342 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2344 gimple stmt = gsi_stmt (*gsi);
2345 tree dest = gimple_call_arg (stmt, 0);
2346 tree destsize = gimple_call_arg (stmt, 1);
2347 tree fmt = gimple_call_arg (stmt, 2);
2348 tree orig = NULL_TREE;
2349 const char *fmt_str = NULL;
2351 if (gimple_call_num_args (stmt) > 4)
2352 return false;
2354 if (gimple_call_num_args (stmt) == 4)
2355 orig = gimple_call_arg (stmt, 3);
 /* The destination size must be a compile-time constant so we can
    prove the copy does not truncate.  */
2357 if (!tree_fits_uhwi_p (destsize))
2358 return false;
2359 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2361 /* Check whether the format is a literal string constant. */
2362 fmt_str = c_getstr (fmt);
2363 if (fmt_str == NULL)
2364 return false;
2366 if (!init_target_chars ())
2367 return false;
2369 /* If the format doesn't contain % args or %%, use strcpy. */
2370 if (strchr (fmt_str, target_percent) == NULL)
2372 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2373 if (!fn)
2374 return false;
2376 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2377 if (orig)
2378 return false;
2380 /* We could expand this as
2381 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2382 or to
2383 memcpy (str, fmt_with_nul_at_cstm1, cst);
2384 but in the former case that might increase code size
2385 and in the latter case grow .rodata section too much.
2386 So punt for now. */
2387 size_t len = strlen (fmt_str);
2388 if (len >= destlen)
2389 return false;
2391 gimple_seq stmts = NULL;
2392 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2393 gimple_seq_add_stmt_without_update (&stmts, repl);
2394 if (gimple_call_lhs (stmt))
 /* The caller uses snprintf's return value: the length of the
    constant format string (it fits, as checked above).  */
2396 repl = gimple_build_assign (gimple_call_lhs (stmt),
2397 build_int_cst (integer_type_node, len));
2398 gimple_seq_add_stmt_without_update (&stmts, repl);
2399 gsi_replace_with_seq_vops (gsi, stmts);
2400 /* gsi now points at the assignment to the lhs, get a
2401 stmt iterator to the memcpy call.
2402 ??? We can't use gsi_for_stmt as that doesn't work when the
2403 CFG isn't built yet. */
2404 gimple_stmt_iterator gsi2 = *gsi;
2405 gsi_prev (&gsi2);
2406 fold_stmt (&gsi2);
2408 else
2410 gsi_replace_with_seq_vops (gsi, stmts);
2411 fold_stmt (gsi);
2413 return true;
2416 /* If the format is "%s", use strcpy if the result isn't used. */
2417 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2419 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2420 if (!fn)
2421 return false;
2423 /* Don't crash on snprintf (str1, cst, "%s"). */
2424 if (!orig)
2425 return false;
 /* Need an upper bound on strlen (ORIG) to prove no truncation.  */
2427 tree orig_len = get_maxval_strlen (orig, 0);
2428 if (!orig_len)
2429 return false;
2431 /* We could expand this as
2432 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2433 or to
2434 memcpy (str1, str2_with_nul_at_cstm1, cst);
2435 but in the former case that might increase code size
2436 and in the latter case grow .rodata section too much.
2437 So punt for now. */
2438 if (compare_tree_int (orig_len, destlen) >= 0)
2439 return false;
2441 /* Convert snprintf (str1, cst, "%s", str2) into
2442 strcpy (str1, str2) if strlen (str2) < cst. */
2443 gimple_seq stmts = NULL;
2444 gimple repl = gimple_build_call (fn, 2, dest, orig);
2445 gimple_seq_add_stmt_without_update (&stmts, repl);
2446 if (gimple_call_lhs (stmt))
2448 if (!useless_type_conversion_p (integer_type_node,
2449 TREE_TYPE (orig_len)))
2450 orig_len = fold_convert (integer_type_node, orig_len);
2451 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2452 gimple_seq_add_stmt_without_update (&stmts, repl);
2453 gsi_replace_with_seq_vops (gsi, stmts);
2454 /* gsi now points at the assignment to the lhs, get a
2455 stmt iterator to the memcpy call.
2456 ??? We can't use gsi_for_stmt as that doesn't work when the
2457 CFG isn't built yet. */
2458 gimple_stmt_iterator gsi2 = *gsi;
2459 gsi_prev (&gsi2);
2460 fold_stmt (&gsi2);
2462 else
2464 gsi_replace_with_seq_vops (gsi, stmts);
2465 fold_stmt (gsi);
2467 return true;
2469 return false;
2473 /* Fold a call to __builtin_strlen with known length LEN. */
2475 static bool
2476 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2478 gimple stmt = gsi_stmt (*gsi);
2479 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2480 if (!len)
2481 return false;
2482 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2483 replace_call_with_value (gsi, len);
2484 return true;
2488 /* Fold the non-target builtin at *GSI and return whether any simplification
2489 was made. */
2491 static bool
2492 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2494 gimple stmt = gsi_stmt (*gsi);
2495 tree callee = gimple_call_fndecl (stmt);
2497 /* Give up for always_inline inline builtins until they are
2498 inlined. */
2499 if (avoid_folding_inline_builtin (callee))
2500 return false;
 /* Dispatch to the dedicated folder for each builtin we know how to
    simplify; each returns true iff it changed the statement.  */
2502 switch (DECL_FUNCTION_CODE (callee))
2504 case BUILT_IN_BZERO:
2505 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2506 gimple_call_arg (stmt, 1));
2507 case BUILT_IN_MEMSET:
2508 return gimple_fold_builtin_memset (gsi,
2509 gimple_call_arg (stmt, 1),
2510 gimple_call_arg (stmt, 2));
 /* bcopy (src, dest, n) has source first, hence the swapped args.  */
2511 case BUILT_IN_BCOPY:
2512 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2513 gimple_call_arg (stmt, 0), 3);
2514 case BUILT_IN_MEMCPY:
2515 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2516 gimple_call_arg (stmt, 1), 0);
2517 case BUILT_IN_MEMPCPY:
2518 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2519 gimple_call_arg (stmt, 1), 1);
2520 case BUILT_IN_MEMMOVE:
2521 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2522 gimple_call_arg (stmt, 1), 3);
2523 case BUILT_IN_SPRINTF_CHK:
2524 case BUILT_IN_VSPRINTF_CHK:
2525 return gimple_fold_builtin_sprintf_chk (gsi, DECL_FUNCTION_CODE (callee));
2526 case BUILT_IN_STRCAT_CHK:
2527 return gimple_fold_builtin_strcat_chk (gsi);
2528 case BUILT_IN_STRNCAT_CHK:
2529 return gimple_fold_builtin_strncat_chk (gsi);
2530 case BUILT_IN_STRLEN:
2531 return gimple_fold_builtin_strlen (gsi);
2532 case BUILT_IN_STRCPY:
2533 return gimple_fold_builtin_strcpy (gsi,
2534 gimple_call_arg (stmt, 0),
2535 gimple_call_arg (stmt, 1));
2536 case BUILT_IN_STRNCPY:
2537 return gimple_fold_builtin_strncpy (gsi,
2538 gimple_call_arg (stmt, 0),
2539 gimple_call_arg (stmt, 1),
2540 gimple_call_arg (stmt, 2));
2541 case BUILT_IN_STRCAT:
2542 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2543 gimple_call_arg (stmt, 1));
2544 case BUILT_IN_FPUTS:
2545 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2546 gimple_call_arg (stmt, 1), false);
2547 case BUILT_IN_FPUTS_UNLOCKED:
2548 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2549 gimple_call_arg (stmt, 1), true);
2550 case BUILT_IN_MEMCPY_CHK:
2551 case BUILT_IN_MEMPCPY_CHK:
2552 case BUILT_IN_MEMMOVE_CHK:
2553 case BUILT_IN_MEMSET_CHK:
2554 return gimple_fold_builtin_memory_chk (gsi,
2555 gimple_call_arg (stmt, 0),
2556 gimple_call_arg (stmt, 1),
2557 gimple_call_arg (stmt, 2),
2558 gimple_call_arg (stmt, 3),
2559 DECL_FUNCTION_CODE (callee));
2560 case BUILT_IN_STRCPY_CHK:
2561 case BUILT_IN_STPCPY_CHK:
2562 return gimple_fold_builtin_stxcpy_chk (gsi,
2563 gimple_call_arg (stmt, 0),
2564 gimple_call_arg (stmt, 1),
2565 gimple_call_arg (stmt, 2),
2566 DECL_FUNCTION_CODE (callee));
2567 case BUILT_IN_STRNCPY_CHK:
2568 case BUILT_IN_STPNCPY_CHK:
2569 return gimple_fold_builtin_stxncpy_chk (gsi,
2570 gimple_call_arg (stmt, 0),
2571 gimple_call_arg (stmt, 1),
2572 gimple_call_arg (stmt, 2),
2573 gimple_call_arg (stmt, 3),
2574 DECL_FUNCTION_CODE (callee));
2575 case BUILT_IN_SNPRINTF_CHK:
2576 case BUILT_IN_VSNPRINTF_CHK:
2577 return gimple_fold_builtin_snprintf_chk (gsi,
2578 DECL_FUNCTION_CODE (callee));
2579 case BUILT_IN_SNPRINTF:
2580 return gimple_fold_builtin_snprintf (gsi);
2581 case BUILT_IN_SPRINTF:
2582 return gimple_fold_builtin_sprintf (gsi);
2583 default:;
2586 /* Try the generic builtin folder. */
2587 bool ignore = (gimple_call_lhs (stmt) == NULL);
2588 tree result = fold_call_stmt (stmt, ignore);
2589 if (result)
2591 if (ignore)
2592 STRIP_NOPS (result);
2593 else
 /* The folded tree must have the exact type callers of the original
    builtin expect.  */
2594 result = fold_convert (gimple_call_return_type (stmt), result);
2595 if (!update_call_from_tree (gsi, result))
2596 gimplify_and_update_call_from_tree (gsi, result);
2597 return true;
2600 return false;
2603 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2604 The statement may be replaced by another statement, e.g., if the call
2605 simplifies to a constant value. Return true if any changes were made.
2606 It is assumed that the operands have been previously folded.
 If INPLACE is true the statement itself is never replaced, only its
 operands are touched. */
2608 static bool
2609 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2611 gimple stmt = gsi_stmt (*gsi);
2612 tree callee;
2613 bool changed = false;
2614 unsigned i;
2616 /* Fold *& in call arguments. */
2617 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2618 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2620 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2621 if (tmp)
2623 gimple_call_set_arg (stmt, i, tmp);
2624 changed = true;
2628 /* Check for virtual calls that became direct calls. */
2629 callee = gimple_call_fn (stmt);
2630 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
2632 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
 /* Propagation already resolved the target; warn if the resolved
    decl contradicts the type-inheritance analysis.  */
2634 if (dump_file && virtual_method_call_p (callee)
2635 && !possible_polymorphic_call_target_p
2636 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
2637 (OBJ_TYPE_REF_EXPR (callee)))))
2639 fprintf (dump_file,
2640 "Type inheritance inconsistent devirtualization of ");
2641 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2642 fprintf (dump_file, " to ");
2643 print_generic_expr (dump_file, callee, TDF_SLIM);
2644 fprintf (dump_file, "\n");
2647 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
2648 changed = true;
2650 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
2652 bool final;
2653 vec <cgraph_node *>targets
2654 = possible_polymorphic_call_targets (callee, stmt, &final);
 /* With a complete target list of at most one entry we can
    devirtualize: one target -> direct call, zero targets ->
    __builtin_unreachable.  */
2655 if (final && targets.length () <= 1 && dbg_cnt (devirt))
2657 tree lhs = gimple_call_lhs (stmt);
2658 if (dump_enabled_p ())
2660 location_t loc = gimple_location_safe (stmt);
2661 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2662 "folding virtual function call to %s\n",
2663 targets.length () == 1
2664 ? targets[0]->name ()
2665 : "__builtin_unreachable");
2667 if (targets.length () == 1)
2669 gimple_call_set_fndecl (stmt, targets[0]->decl);
2670 changed = true;
2671 /* If the call becomes noreturn, remove the lhs. */
2672 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
2674 if (TREE_CODE (lhs) == SSA_NAME)
 /* Keep the SSA name defined by assigning it an
    arbitrary default definition.  */
2676 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
2677 tree def = get_or_create_ssa_default_def (cfun, var);
2678 gimple new_stmt = gimple_build_assign (lhs, def);
2679 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
2681 gimple_call_set_lhs (stmt, NULL_TREE);
2684 else
2686 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2687 gimple new_stmt = gimple_build_call (fndecl, 0);
2688 gimple_set_location (new_stmt, gimple_location (stmt));
2689 if (lhs && TREE_CODE (lhs) == SSA_NAME)
2691 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
2692 tree def = get_or_create_ssa_default_def (cfun, var);
2694 /* To satisfy condition for
2695 cgraph_update_edges_for_call_stmt_node,
2696 we need to preserve GIMPLE_CALL statement
2697 at position of GSI iterator. */
2698 update_call_from_tree (gsi, def);
2699 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
2701 else
2703 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2704 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2705 gsi_replace (gsi, new_stmt, false);
2707 return true;
2713 if (inplace)
2714 return changed;
2716 /* Check for builtins that CCP can handle using information not
2717 available in the generic fold routines. */
2718 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2720 if (gimple_fold_builtin (gsi))
2721 changed = true;
2723 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
2725 changed |= targetm.gimple_fold_builtin (gsi);
2727 else if (gimple_call_internal_p (stmt))
2729 enum tree_code subcode = ERROR_MARK;
2730 tree result = NULL_TREE;
2731 switch (gimple_call_internal_fn (stmt))
2733 case IFN_BUILTIN_EXPECT:
2734 result = fold_builtin_expect (gimple_location (stmt),
2735 gimple_call_arg (stmt, 0),
2736 gimple_call_arg (stmt, 1),
2737 gimple_call_arg (stmt, 2));
2738 break;
2739 case IFN_UBSAN_OBJECT_SIZE:
 /* Drop the check when the object size is unknown (-1) or the
    offset is statically within bounds.  */
2740 if (integer_all_onesp (gimple_call_arg (stmt, 2))
2741 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
2742 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
2743 && tree_int_cst_le (gimple_call_arg (stmt, 1),
2744 gimple_call_arg (stmt, 2))))
2746 gsi_replace (gsi, gimple_build_nop (), true);
2747 unlink_stmt_vdef (stmt);
2748 release_defs (stmt);
2749 return true;
2751 break;
2752 case IFN_UBSAN_CHECK_ADD:
2753 subcode = PLUS_EXPR;
2754 break;
2755 case IFN_UBSAN_CHECK_SUB:
2756 subcode = MINUS_EXPR;
2757 break;
2758 case IFN_UBSAN_CHECK_MUL:
2759 subcode = MULT_EXPR;
2760 break;
2761 default:
2762 break;
 /* Fold UBSAN overflow checks whose result is trivially known not
    to overflow.  */
2764 if (subcode != ERROR_MARK)
2766 tree arg0 = gimple_call_arg (stmt, 0);
2767 tree arg1 = gimple_call_arg (stmt, 1);
2768 /* x = y + 0; x = y - 0; x = y * 0; */
2769 if (integer_zerop (arg1))
2770 result = subcode == MULT_EXPR
2771 ? build_zero_cst (TREE_TYPE (arg0))
2772 : arg0;
2773 /* x = 0 + y; x = 0 * y; */
2774 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
2775 result = subcode == MULT_EXPR
2776 ? build_zero_cst (TREE_TYPE (arg0))
2777 : arg1;
2778 /* x = y - y; */
2779 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
2780 result = build_zero_cst (TREE_TYPE (arg0));
2781 /* x = y * 1; x = 1 * y; */
2782 else if (subcode == MULT_EXPR)
2784 if (integer_onep (arg1))
2785 result = arg0;
2786 else if (integer_onep (arg0))
2787 result = arg1;
2790 if (result)
2792 if (!update_call_from_tree (gsi, result))
2793 gimplify_and_update_call_from_tree (gsi, result);
2794 changed = true;
2798 return changed;
2802 /* Worker for fold_stmt_1 dispatch to pattern based folding with
2803 gimple_simplify.
2805 Replaces *GSI with the simplification result in RCODE and OPS
2806 and the associated statements in *SEQ. Does the replacement
2807 according to INPLACE and returns true if the operation succeeded. */
2809 static bool
2810 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
2811 code_helper rcode, tree *ops,
2812 gimple_seq *seq, bool inplace)
2814 gimple stmt = gsi_stmt (*gsi);
2816 /* Play safe and do not allow abnormals to be mentioned in
2817 newly created statements. See also maybe_push_res_to_seq. */
2818 if ((TREE_CODE (ops[0]) == SSA_NAME
2819 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
2820 || (ops[1]
2821 && TREE_CODE (ops[1]) == SSA_NAME
2822 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
2823 || (ops[2]
2824 && TREE_CODE (ops[2]) == SSA_NAME
2825 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
2826 return false;
2828 if (gimple_code (stmt) == GIMPLE_COND)
2830 gcc_assert (rcode.is_tree_code ());
2831 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
2832 /* GIMPLE_CONDs condition may not throw. */
2833 && (!flag_exceptions
2834 || !cfun->can_throw_non_call_exceptions
2835 || !operation_could_trap_p (rcode,
2836 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
2837 false, NULL_TREE)))
2838 gimple_cond_set_condition (stmt, rcode, ops[0], ops[1]);
 /* A plain SSA name result becomes the test OPS[0] != 0.  */
2839 else if (rcode == SSA_NAME)
2840 gimple_cond_set_condition (stmt, NE_EXPR, ops[0],
2841 build_zero_cst (TREE_TYPE (ops[0])));
 /* A constant result decides the branch statically.  */
2842 else if (rcode == INTEGER_CST)
2844 if (integer_zerop (ops[0]))
2845 gimple_cond_make_false (stmt);
2846 else
2847 gimple_cond_make_true (stmt);
2849 else if (!inplace)
 /* Otherwise materialize the simplified expression into SEQ and
    compare its result against zero.  */
2851 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
2852 ops, seq);
2853 if (!res)
2854 return false;
2855 gimple_cond_set_condition (stmt, NE_EXPR, res,
2856 build_zero_cst (TREE_TYPE (res)));
2858 else
2859 return false;
2860 if (dump_file && (dump_flags & TDF_DETAILS))
2862 fprintf (dump_file, "gimple_simplified to ");
2863 if (!gimple_seq_empty_p (*seq))
2864 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
2865 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
2866 0, TDF_SLIM);
2868 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
2869 return true;
2871 else if (is_gimple_assign (stmt)
2872 && rcode.is_tree_code ())
 /* In-place replacement is only possible when the new rhs does not
    need more operand slots than the statement already has.  */
2874 if (!inplace
2875 || gimple_num_ops (stmt) <= get_gimple_rhs_num_ops (rcode))
2877 maybe_build_generic_op (rcode,
2878 TREE_TYPE (gimple_assign_lhs (stmt)),
2879 &ops[0], ops[1], ops[2]);
2880 gimple_assign_set_rhs_with_ops_1 (gsi, rcode,
2881 ops[0], ops[1], ops[2]);
2882 if (dump_file && (dump_flags & TDF_DETAILS))
2884 fprintf (dump_file, "gimple_simplified to ");
2885 if (!gimple_seq_empty_p (*seq))
2886 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
2887 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
2888 0, TDF_SLIM);
2890 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
2891 return true;
2894 else if (!inplace)
2896 if (gimple_has_lhs (stmt))
2898 tree lhs = gimple_get_lhs (stmt);
2899 maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
2900 ops, seq, lhs);
2901 if (dump_file && (dump_flags & TDF_DETAILS))
2903 fprintf (dump_file, "gimple_simplified to ");
2904 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
2906 gsi_replace_with_seq_vops (gsi, *seq);
2907 return true;
2909 else
2910 gcc_unreachable ();
2913 return false;
2916 /* Canonicalize MEM_REFs invariant address operand after propagation.
 T points at the reference tree to canonicalize; it is rewritten in
 place.  Returns true if *T was changed. */
2918 static bool
2919 maybe_canonicalize_mem_ref_addr (tree *t)
2921 bool res = false;
2923 if (TREE_CODE (*t) == ADDR_EXPR)
2924 t = &TREE_OPERAND (*t, 0);
 /* Strip component/array references down to the base object.  */
2926 while (handled_component_p (*t))
2927 t = &TREE_OPERAND (*t, 0);
2929 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
2930 of invariant addresses into a SSA name MEM_REF address. */
2931 if (TREE_CODE (*t) == MEM_REF
2932 || TREE_CODE (*t) == TARGET_MEM_REF)
2934 tree addr = TREE_OPERAND (*t, 0);
2935 if (TREE_CODE (addr) == ADDR_EXPR
2936 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
2937 || handled_component_p (TREE_OPERAND (addr, 0))))
2939 tree base;
2940 HOST_WIDE_INT coffset;
2941 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
2942 &coffset);
2943 if (!base)
2944 gcc_unreachable ();
 /* Fold the component offset into the MEM_REF's constant
    offset operand.  */
2946 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
2947 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
2948 TREE_OPERAND (*t, 1),
2949 size_int (coffset));
2950 res = true;
2952 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
2953 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
2956 /* Canonicalize back MEM_REFs to plain reference trees if the object
2957 accessed is a decl that has the same access semantics as the MEM_REF. */
2958 if (TREE_CODE (*t) == MEM_REF
2959 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
2960 && integer_zerop (TREE_OPERAND (*t, 1)))
2962 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
2963 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
2964 if (/* Same volatile qualification. */
2965 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
2966 /* Same TBAA behavior with -fstrict-aliasing. */
2967 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
2968 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
2969 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
2970 /* Same alignment. */
2971 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
2972 /* We have to look out here to not drop a required conversion
2973 from the rhs to the lhs if *t appears on the lhs or vice-versa
2974 if it appears on the rhs. Thus require strict type
2975 compatibility. */
2976 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
2978 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
2979 res = true;
2983 /* Canonicalize TARGET_MEM_REF in particular with respect to
2984 the indexes becoming constant. */
2985 else if (TREE_CODE (*t) == TARGET_MEM_REF)
2987 tree tem = maybe_fold_tmr (*t);
2988 if (tem)
2990 *t = tem;
2991 res = true;
2995 return res;
2998 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
2999 distinguishes both cases.
 VALUEIZE is the callback used by gimple_simplify to look through SSA
 edges.  Returns true if any change was made to the statement. */
3001 static bool
3002 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3004 bool changed = false;
3005 gimple stmt = gsi_stmt (*gsi);
3006 unsigned i;
3008 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3009 after propagation.
3010 ??? This shouldn't be done in generic folding but in the
3011 propagation helpers which also know whether an address was
3012 propagated. */
3013 switch (gimple_code (stmt))
3015 case GIMPLE_ASSIGN:
3016 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3018 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3019 if ((REFERENCE_CLASS_P (*rhs)
3020 || TREE_CODE (*rhs) == ADDR_EXPR)
3021 && maybe_canonicalize_mem_ref_addr (rhs))
3022 changed = true;
3023 tree *lhs = gimple_assign_lhs_ptr (stmt);
3024 if (REFERENCE_CLASS_P (*lhs)
3025 && maybe_canonicalize_mem_ref_addr (lhs))
3026 changed = true;
3028 break;
3029 case GIMPLE_CALL:
3031 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3033 tree *arg = gimple_call_arg_ptr (stmt, i);
3034 if (REFERENCE_CLASS_P (*arg)
3035 && maybe_canonicalize_mem_ref_addr (arg))
3036 changed = true;
3038 tree *lhs = gimple_call_lhs_ptr (stmt);
3039 if (*lhs
3040 && REFERENCE_CLASS_P (*lhs)
3041 && maybe_canonicalize_mem_ref_addr (lhs))
3042 changed = true;
3043 break;
3045 case GIMPLE_ASM:
3047 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3049 tree link = gimple_asm_output_op (stmt, i);
3050 tree op = TREE_VALUE (link);
3051 if (REFERENCE_CLASS_P (op)
3052 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3053 changed = true;
3055 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3057 tree link = gimple_asm_input_op (stmt, i);
3058 tree op = TREE_VALUE (link);
3059 if ((REFERENCE_CLASS_P (op)
3060 || TREE_CODE (op) == ADDR_EXPR)
3061 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3062 changed = true;
3065 break;
3066 case GIMPLE_DEBUG:
3067 if (gimple_debug_bind_p (stmt))
3069 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3070 if (*val
3071 && (REFERENCE_CLASS_P (*val)
3072 || TREE_CODE (*val) == ADDR_EXPR)
3073 && maybe_canonicalize_mem_ref_addr (val))
3074 changed = true;
3076 break;
3077 default:;
3080 /* Dispatch to pattern-based folding. */
 /* In-place folding is restricted to statement kinds gimple_simplify
    can rewrite without needing to emit new statements.  */
3081 if (!inplace
3082 || is_gimple_assign (stmt)
3083 || gimple_code (stmt) == GIMPLE_COND)
3085 gimple_seq seq = NULL;
3086 code_helper rcode;
3087 tree ops[3] = {};
3088 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
3090 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3091 changed = true;
3092 else
3093 gimple_seq_discard (seq);
 /* Re-fetch the statement, the above may have replaced it.  */
3097 stmt = gsi_stmt (*gsi);
3099 /* Fold the main computation performed by the statement. */
3100 switch (gimple_code (stmt))
3102 case GIMPLE_ASSIGN:
3104 unsigned old_num_ops = gimple_num_ops (stmt);
3105 enum tree_code subcode = gimple_assign_rhs_code (stmt);
3106 tree lhs = gimple_assign_lhs (stmt);
3107 tree new_rhs;
3108 /* First canonicalize operand order. This avoids building new
3109 trees if this is the only thing fold would later do. */
3110 if ((commutative_tree_code (subcode)
3111 || commutative_ternary_tree_code (subcode))
3112 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
3113 gimple_assign_rhs2 (stmt), false))
3115 tree tem = gimple_assign_rhs1 (stmt);
3116 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
3117 gimple_assign_set_rhs2 (stmt, tem);
3118 changed = true;
3120 new_rhs = fold_gimple_assign (gsi);
3121 if (new_rhs
3122 && !useless_type_conversion_p (TREE_TYPE (lhs),
3123 TREE_TYPE (new_rhs)))
3124 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
 /* When folding in place, only accept a rhs that does not need
    more operand slots than the statement already has.  */
3125 if (new_rhs
3126 && (!inplace
3127 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3129 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3130 changed = true;
3132 break;
3135 case GIMPLE_COND:
3136 changed |= fold_gimple_cond (stmt);
3137 break;
3139 case GIMPLE_CALL:
3140 changed |= gimple_fold_call (gsi, inplace);
3141 break;
3143 case GIMPLE_ASM:
3144 /* Fold *& in asm operands. */
3146 size_t noutputs;
3147 const char **oconstraints;
3148 const char *constraint;
3149 bool allows_mem, allows_reg;
3151 noutputs = gimple_asm_noutputs (stmt);
3152 oconstraints = XALLOCAVEC (const char *, noutputs);
3154 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3156 tree link = gimple_asm_output_op (stmt, i);
3157 tree op = TREE_VALUE (link);
3158 oconstraints[i]
3159 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3160 if (REFERENCE_CLASS_P (op)
3161 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3163 TREE_VALUE (link) = op;
3164 changed = true;
3167 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3169 tree link = gimple_asm_input_op (stmt, i);
3170 tree op = TREE_VALUE (link);
3171 constraint
3172 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3173 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3174 oconstraints, &allows_mem, &allows_reg);
3175 if (REFERENCE_CLASS_P (op)
3176 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3177 != NULL_TREE)
3179 TREE_VALUE (link) = op;
3180 changed = true;
3184 break;
3186 case GIMPLE_DEBUG:
3187 if (gimple_debug_bind_p (stmt))
3189 tree val = gimple_debug_bind_get_value (stmt);
3190 if (val
3191 && REFERENCE_CLASS_P (val))
3193 tree tem = maybe_fold_reference (val, false);
3194 if (tem)
3196 gimple_debug_bind_set_value (stmt, tem);
3197 changed = true;
3200 else if (val
3201 && TREE_CODE (val) == ADDR_EXPR)
3203 tree ref = TREE_OPERAND (val, 0);
3204 tree tem = maybe_fold_reference (ref, false);
3205 if (tem)
3207 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3208 gimple_debug_bind_set_value (stmt, tem);
3209 changed = true;
3213 break;
3215 default:;
 /* Re-fetch once more, gimple_fold_call may have replaced it.  */
3218 stmt = gsi_stmt (*gsi);
3220 /* Fold *& on the lhs. */
3221 if (gimple_has_lhs (stmt))
3223 tree lhs = gimple_get_lhs (stmt);
3224 if (lhs && REFERENCE_CLASS_P (lhs))
3226 tree new_lhs = maybe_fold_reference (lhs, true);
3227 if (new_lhs)
3229 gimple_set_lhs (stmt, new_lhs);
3230 changed = true;
3235 return changed;
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
3246 /* Valueization callback that ends up following single-use SSA edges only. */
3248 tree
3249 follow_single_use_edges (tree val)
3251 if (TREE_CODE (val) == SSA_NAME
3252 && !has_single_use (val))
3253 return NULL_TREE;
3254 return val;
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.
   This overload does not follow SSA use-def edges (it passes the
   no_follow_ssa_edges valueization callback).  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
/* Like the single-argument fold_stmt, but uses VALUEIZE as the
   valueization callback when deciding whether to follow SSA use-def
   edges during folding.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
3276 /* Perform the minimal folding on statement *GSI. Only operations like
3277 *&x created by constant propagation are handled. The statement cannot
3278 be replaced with a new one. Return true if the statement was
3279 changed, false otherwise.
3280 The statement *GSI should be in valid gimple form but may
3281 be in unfolded state as resulting from for example constant propagation
3282 which can produce *&x = 0. */
3284 bool
3285 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3287 gimple stmt = gsi_stmt (*gsi);
3288 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3289 gcc_assert (gsi_stmt (*gsi) == stmt);
3290 return changed;
3293 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3294 if EXPR is null or we don't know how.
3295 If non-null, the result always has boolean type. */
3297 static tree
3298 canonicalize_bool (tree expr, bool invert)
3300 if (!expr)
3301 return NULL_TREE;
3302 else if (invert)
3304 if (integer_nonzerop (expr))
3305 return boolean_false_node;
3306 else if (integer_zerop (expr))
3307 return boolean_true_node;
3308 else if (TREE_CODE (expr) == SSA_NAME)
3309 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3310 build_int_cst (TREE_TYPE (expr), 0));
3311 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3312 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3313 boolean_type_node,
3314 TREE_OPERAND (expr, 0),
3315 TREE_OPERAND (expr, 1));
3316 else
3317 return NULL_TREE;
3319 else
3321 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3322 return expr;
3323 if (integer_nonzerop (expr))
3324 return boolean_true_node;
3325 else if (integer_zerop (expr))
3326 return boolean_false_node;
3327 else if (TREE_CODE (expr) == SSA_NAME)
3328 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3329 build_int_cst (TREE_TYPE (expr), 0));
3330 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3331 return fold_build2 (TREE_CODE (expr),
3332 boolean_type_node,
3333 TREE_OPERAND (expr, 0),
3334 TREE_OPERAND (expr, 1));
3335 else
3336 return NULL_TREE;
3340 /* Check to see if a boolean expression EXPR is logically equivalent to the
3341 comparison (OP1 CODE OP2). Check for various identities involving
3342 SSA_NAMEs. */
3344 static bool
3345 same_bool_comparison_p (const_tree expr, enum tree_code code,
3346 const_tree op1, const_tree op2)
3348 gimple s;
3350 /* The obvious case. */
3351 if (TREE_CODE (expr) == code
3352 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3353 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3354 return true;
3356 /* Check for comparing (name, name != 0) and the case where expr
3357 is an SSA_NAME with a definition matching the comparison. */
3358 if (TREE_CODE (expr) == SSA_NAME
3359 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3361 if (operand_equal_p (expr, op1, 0))
3362 return ((code == NE_EXPR && integer_zerop (op2))
3363 || (code == EQ_EXPR && integer_nonzerop (op2)));
3364 s = SSA_NAME_DEF_STMT (expr);
3365 if (is_gimple_assign (s)
3366 && gimple_assign_rhs_code (s) == code
3367 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3368 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3369 return true;
3372 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3373 of name is a comparison, recurse. */
3374 if (TREE_CODE (op1) == SSA_NAME
3375 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3377 s = SSA_NAME_DEF_STMT (op1);
3378 if (is_gimple_assign (s)
3379 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3381 enum tree_code c = gimple_assign_rhs_code (s);
3382 if ((c == NE_EXPR && integer_zerop (op2))
3383 || (c == EQ_EXPR && integer_nonzerop (op2)))
3384 return same_bool_comparison_p (expr, c,
3385 gimple_assign_rhs1 (s),
3386 gimple_assign_rhs2 (s));
3387 if ((c == EQ_EXPR && integer_zerop (op2))
3388 || (c == NE_EXPR && integer_nonzerop (op2)))
3389 return same_bool_comparison_p (expr,
3390 invert_tree_comparison (c, false),
3391 gimple_assign_rhs1 (s),
3392 gimple_assign_rhs2 (s));
3395 return false;
3398 /* Check to see if two boolean expressions OP1 and OP2 are logically
3399 equivalent. */
3401 static bool
3402 same_bool_result_p (const_tree op1, const_tree op2)
3404 /* Simple cases first. */
3405 if (operand_equal_p (op1, op2, 0))
3406 return true;
3408 /* Check the cases where at least one of the operands is a comparison.
3409 These are a bit smarter than operand_equal_p in that they apply some
3410 identifies on SSA_NAMEs. */
3411 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
3412 && same_bool_comparison_p (op1, TREE_CODE (op2),
3413 TREE_OPERAND (op2, 0),
3414 TREE_OPERAND (op2, 1)))
3415 return true;
3416 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
3417 && same_bool_comparison_p (op2, TREE_CODE (op1),
3418 TREE_OPERAND (op1, 0),
3419 TREE_OPERAND (op1, 1)))
3420 return true;
3422 /* Default case. */
3423 return false;
/* Forward declarations for some mutually recursive functions.
   and_comparisons_1 / or_comparisons_1 combine two comparisons, while
   the and_var_with_comparison / or_var_with_comparison helpers (and
   their _1 variants) combine an SSA variable's definition with a
   comparison; each family can recurse into the other.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
3447 /* Helper function for and_comparisons_1: try to simplify the AND of the
3448 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3449 If INVERT is true, invert the value of the VAR before doing the AND.
3450 Return NULL_EXPR if we can't simplify this to a single expression. */
3452 static tree
3453 and_var_with_comparison (tree var, bool invert,
3454 enum tree_code code2, tree op2a, tree op2b)
3456 tree t;
3457 gimple stmt = SSA_NAME_DEF_STMT (var);
3459 /* We can only deal with variables whose definitions are assignments. */
3460 if (!is_gimple_assign (stmt))
3461 return NULL_TREE;
3463 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3464 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3465 Then we only have to consider the simpler non-inverted cases. */
3466 if (invert)
3467 t = or_var_with_comparison_1 (stmt,
3468 invert_tree_comparison (code2, false),
3469 op2a, op2b);
3470 else
3471 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3472 return canonicalize_bool (t, invert);
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.
     true_test_var/false_test_var remember whether OP2A tests VAR for
     truth or falsehood, for use by the reassociation code below.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
3892 /* Try to simplify the AND of two comparisons, specified by
3893 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
3894 If this can be simplified to a single expression (without requiring
3895 introducing more SSA variables to hold intermediate values),
3896 return the resulting tree. Otherwise return NULL_TREE.
3897 If the result expression is non-null, it has boolean type. */
3899 tree
3900 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
3901 enum tree_code code2, tree op2a, tree op2b)
3903 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
3904 if (t)
3905 return t;
3906 else
3907 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
3910 /* Helper function for or_comparisons_1: try to simplify the OR of the
3911 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3912 If INVERT is true, invert the value of VAR before doing the OR.
3913 Return NULL_EXPR if we can't simplify this to a single expression. */
3915 static tree
3916 or_var_with_comparison (tree var, bool invert,
3917 enum tree_code code2, tree op2a, tree op2b)
3919 tree t;
3920 gimple stmt = SSA_NAME_DEF_STMT (var);
3922 /* We can only deal with variables whose definitions are assignments. */
3923 if (!is_gimple_assign (stmt))
3924 return NULL_TREE;
3926 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3927 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
3928 Then we only have to consider the simpler non-inverted cases. */
3929 if (invert)
3930 t = and_var_with_comparison_1 (stmt,
3931 invert_tree_comparison (code2, false),
3932 op2a, op2b);
3933 else
3934 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
3935 return canonicalize_bool (t, invert);
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .
     true_test_var/false_test_var remember whether OP2A tests VAR for
     truth or falsehood, for use by the reassociation code below.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4102 /* Try to simplify the OR of two comparisons defined by
4103 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4104 If this can be done without constructing an intermediate value,
4105 return the resulting tree; otherwise NULL_TREE is returned.
4106 This function is deliberately asymmetric as it recurses on SSA_DEFs
4107 in the first comparison but not the second. */
4109 static tree
4110 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4111 enum tree_code code2, tree op2a, tree op2b)
4113 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4115 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4116 if (operand_equal_p (op1a, op2a, 0)
4117 && operand_equal_p (op1b, op2b, 0))
4119 /* Result will be either NULL_TREE, or a combined comparison. */
4120 tree t = combine_comparisons (UNKNOWN_LOCATION,
4121 TRUTH_ORIF_EXPR, code1, code2,
4122 truth_type, op1a, op1b);
4123 if (t)
4124 return t;
4127 /* Likewise the swapped case of the above. */
4128 if (operand_equal_p (op1a, op2b, 0)
4129 && operand_equal_p (op1b, op2a, 0))
4131 /* Result will be either NULL_TREE, or a combined comparison. */
4132 tree t = combine_comparisons (UNKNOWN_LOCATION,
4133 TRUTH_ORIF_EXPR, code1,
4134 swap_tree_comparison (code2),
4135 truth_type, op1a, op1b);
4136 if (t)
4137 return t;
4140 /* If both comparisons are of the same value against constants, we might
4141 be able to merge them. */
4142 if (operand_equal_p (op1a, op2a, 0)
4143 && TREE_CODE (op1b) == INTEGER_CST
4144 && TREE_CODE (op2b) == INTEGER_CST)
4146 int cmp = tree_int_cst_compare (op1b, op2b);
4148 /* If we have (op1a != op1b), we should either be able to
4149 return that or TRUE, depending on whether the constant op1b
4150 also satisfies the other comparison against op2b. */
4151 if (code1 == NE_EXPR)
4153 bool done = true;
4154 bool val;
4155 switch (code2)
4157 case EQ_EXPR: val = (cmp == 0); break;
4158 case NE_EXPR: val = (cmp != 0); break;
4159 case LT_EXPR: val = (cmp < 0); break;
4160 case GT_EXPR: val = (cmp > 0); break;
4161 case LE_EXPR: val = (cmp <= 0); break;
4162 case GE_EXPR: val = (cmp >= 0); break;
4163 default: done = false;
4165 if (done)
4167 if (val)
4168 return boolean_true_node;
4169 else
4170 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4173 /* Likewise if the second comparison is a != comparison. */
4174 else if (code2 == NE_EXPR)
4176 bool done = true;
4177 bool val;
4178 switch (code1)
4180 case EQ_EXPR: val = (cmp == 0); break;
4181 case NE_EXPR: val = (cmp != 0); break;
4182 case LT_EXPR: val = (cmp > 0); break;
4183 case GT_EXPR: val = (cmp < 0); break;
4184 case LE_EXPR: val = (cmp >= 0); break;
4185 case GE_EXPR: val = (cmp <= 0); break;
4186 default: done = false;
4188 if (done)
4190 if (val)
4191 return boolean_true_node;
4192 else
4193 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4197 /* See if an equality test is redundant with the other comparison. */
4198 else if (code1 == EQ_EXPR)
4200 bool val;
4201 switch (code2)
4203 case EQ_EXPR: val = (cmp == 0); break;
4204 case NE_EXPR: val = (cmp != 0); break;
4205 case LT_EXPR: val = (cmp < 0); break;
4206 case GT_EXPR: val = (cmp > 0); break;
4207 case LE_EXPR: val = (cmp <= 0); break;
4208 case GE_EXPR: val = (cmp >= 0); break;
4209 default:
4210 val = false;
4212 if (val)
4213 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4215 else if (code2 == EQ_EXPR)
4217 bool val;
4218 switch (code1)
4220 case EQ_EXPR: val = (cmp == 0); break;
4221 case NE_EXPR: val = (cmp != 0); break;
4222 case LT_EXPR: val = (cmp > 0); break;
4223 case GT_EXPR: val = (cmp < 0); break;
4224 case LE_EXPR: val = (cmp >= 0); break;
4225 case GE_EXPR: val = (cmp <= 0); break;
4226 default:
4227 val = false;
4229 if (val)
4230 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4233 /* Chose the less restrictive of two < or <= comparisons. */
4234 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4235 && (code2 == LT_EXPR || code2 == LE_EXPR))
4237 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4238 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4239 else
4240 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4243 /* Likewise chose the less restrictive of two > or >= comparisons. */
4244 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4245 && (code2 == GT_EXPR || code2 == GE_EXPR))
4247 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4248 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4249 else
4250 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4253 /* Check for singleton ranges. */
4254 else if (cmp == 0
4255 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4256 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4257 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4259 /* Check for less/greater pairs that don't restrict the range at all. */
4260 else if (cmp >= 0
4261 && (code1 == LT_EXPR || code1 == LE_EXPR)
4262 && (code2 == GT_EXPR || code2 == GE_EXPR))
4263 return boolean_true_node;
4264 else if (cmp <= 0
4265 && (code1 == GT_EXPR || code1 == GE_EXPR)
4266 && (code2 == LT_EXPR || code2 == LE_EXPR))
4267 return boolean_true_node;
4270 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4271 NAME's definition is a truth value. See if there are any simplifications
4272 that can be done against the NAME's definition. */
4273 if (TREE_CODE (op1a) == SSA_NAME
4274 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4275 && (integer_zerop (op1b) || integer_onep (op1b)))
4277 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4278 || (code1 == NE_EXPR && integer_onep (op1b)));
4279 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4280 switch (gimple_code (stmt))
4282 case GIMPLE_ASSIGN:
4283 /* Try to simplify by copy-propagating the definition. */
4284 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4286 case GIMPLE_PHI:
4287 /* If every argument to the PHI produces the same result when
4288 ORed with the second comparison, we win.
4289 Do not do this unless the type is bool since we need a bool
4290 result here anyway. */
4291 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4293 tree result = NULL_TREE;
4294 unsigned i;
4295 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4297 tree arg = gimple_phi_arg_def (stmt, i);
4299 /* If this PHI has itself as an argument, ignore it.
4300 If all the other args produce the same result,
4301 we're still OK. */
4302 if (arg == gimple_phi_result (stmt))
4303 continue;
4304 else if (TREE_CODE (arg) == INTEGER_CST)
4306 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4308 if (!result)
4309 result = boolean_true_node;
4310 else if (!integer_onep (result))
4311 return NULL_TREE;
4313 else if (!result)
4314 result = fold_build2 (code2, boolean_type_node,
4315 op2a, op2b);
4316 else if (!same_bool_comparison_p (result,
4317 code2, op2a, op2b))
4318 return NULL_TREE;
4320 else if (TREE_CODE (arg) == SSA_NAME
4321 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4323 tree temp;
4324 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4325 /* In simple cases we can look through PHI nodes,
4326 but we have to be careful with loops.
4327 See PR49073. */
4328 if (! dom_info_available_p (CDI_DOMINATORS)
4329 || gimple_bb (def_stmt) == gimple_bb (stmt)
4330 || dominated_by_p (CDI_DOMINATORS,
4331 gimple_bb (def_stmt),
4332 gimple_bb (stmt)))
4333 return NULL_TREE;
4334 temp = or_var_with_comparison (arg, invert, code2,
4335 op2a, op2b);
4336 if (!temp)
4337 return NULL_TREE;
4338 else if (!result)
4339 result = temp;
4340 else if (!same_bool_result_p (result, temp))
4341 return NULL_TREE;
4343 else
4344 return NULL_TREE;
4346 return result;
4349 default:
4350 break;
4353 return NULL_TREE;
4356 /* Try to simplify the OR of two comparisons, specified by
4357 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4358 If this can be simplified to a single expression (without requiring
4359 introducing more SSA variables to hold intermediate values),
4360 return the resulting tree. Otherwise return NULL_TREE.
4361 If the result expression is non-null, it has boolean type. */
4363 tree
4364 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4365 enum tree_code code2, tree op2a, tree op2b)
4367 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4368 if (t)
4369 return t;
4370 else
4371 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4375 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4377 Either NULL_TREE, a simplified but non-constant or a constant
4378 is returned.
4380 ??? This should go into a gimple-fold-inline.h file to be eventually
4381 privatized with the single valueize function used in the various TUs
4382 to avoid the indirect function call overhead. */
4384 tree
4385 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
4387 code_helper rcode;
4388 tree ops[3] = {};
4389 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4390 edges if there are intermediate VARYING defs. For this reason
4391 do not follow SSA edges here even though SCCVN can technically
4392 just deal fine with that. */
4393 if (gimple_simplify (stmt, &rcode, ops, NULL, no_follow_ssa_edges)
4394 && rcode.is_tree_code ()
4395 && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
4396 || ((tree_code) rcode) == ADDR_EXPR)
4397 && is_gimple_val (ops[0]))
4399 tree res = ops[0];
4400 if (dump_file && dump_flags & TDF_DETAILS)
4402 fprintf (dump_file, "Match-and-simplified ");
4403 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4404 fprintf (dump_file, " to ");
4405 print_generic_expr (dump_file, res, 0);
4406 fprintf (dump_file, "\n");
4408 return res;
/* Match-and-simplify did not yield a gimple value; fall back to folding
   by statement code.  */
4411 location_t loc = gimple_location (stmt);
4412 switch (gimple_code (stmt))
4414 case GIMPLE_ASSIGN:
4416 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4418 switch (get_gimple_rhs_class (subcode))
4420 case GIMPLE_SINGLE_RHS:
4422 tree rhs = gimple_assign_rhs1 (stmt);
4423 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4425 if (TREE_CODE (rhs) == SSA_NAME)
4427 /* If the RHS is an SSA_NAME, return its known constant value,
4428 if any. */
4429 return (*valueize) (rhs);
4431 /* Handle propagating invariant addresses into address
4432 operations. */
4433 else if (TREE_CODE (rhs) == ADDR_EXPR
4434 && !is_gimple_min_invariant (rhs))
4436 HOST_WIDE_INT offset = 0;
4437 tree base;
4438 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4439 &offset,
4440 valueize);
4441 if (base
4442 && (CONSTANT_CLASS_P (base)
4443 || decl_address_invariant_p (base)))
4444 return build_invariant_address (TREE_TYPE (rhs),
4445 base, offset);
/* A fully-populated vector CONSTRUCTOR folds to a VECTOR_CST when every
   valueized element is a constant.  */
4447 else if (TREE_CODE (rhs) == CONSTRUCTOR
4448 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4449 && (CONSTRUCTOR_NELTS (rhs)
4450 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4452 unsigned i;
4453 tree val, *vec;
4455 vec = XALLOCAVEC (tree,
4456 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
4457 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4459 val = (*valueize) (val);
4460 if (TREE_CODE (val) == INTEGER_CST
4461 || TREE_CODE (val) == REAL_CST
4462 || TREE_CODE (val) == FIXED_CST)
4463 vec[i] = val;
4464 else
4465 return NULL_TREE;
4468 return build_vector (TREE_TYPE (rhs), vec);
4470 if (subcode == OBJ_TYPE_REF)
4472 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
4473 /* If callee is constant, we can fold away the wrapper. */
4474 if (is_gimple_min_invariant (val))
4475 return val;
4478 if (kind == tcc_reference)
4480 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
4481 || TREE_CODE (rhs) == REALPART_EXPR
4482 || TREE_CODE (rhs) == IMAGPART_EXPR)
4483 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4485 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4486 return fold_unary_loc (EXPR_LOCATION (rhs),
4487 TREE_CODE (rhs),
4488 TREE_TYPE (rhs), val);
4490 else if (TREE_CODE (rhs) == BIT_FIELD_REF
4491 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4493 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4494 return fold_ternary_loc (EXPR_LOCATION (rhs),
4495 TREE_CODE (rhs),
4496 TREE_TYPE (rhs), val,
4497 TREE_OPERAND (rhs, 1),
4498 TREE_OPERAND (rhs, 2));
/* For a MEM_REF with a constant-address base, rewrite it to the
   constant form before trying the aggregate-constant lookup.  */
4500 else if (TREE_CODE (rhs) == MEM_REF
4501 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4503 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4504 if (TREE_CODE (val) == ADDR_EXPR
4505 && is_gimple_min_invariant (val))
4507 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
4508 unshare_expr (val),
4509 TREE_OPERAND (rhs, 1));
4510 if (tem)
4511 rhs = tem;
4514 return fold_const_aggregate_ref_1 (rhs, valueize);
4516 else if (kind == tcc_declaration)
4517 return get_symbol_constant_value (rhs);
4518 return rhs;
4521 case GIMPLE_UNARY_RHS:
4523 /* Handle unary operators that can appear in GIMPLE form.
4524 Note that we know the single operand must be a constant,
4525 so this should almost always return a simplified RHS. */
4526 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4528 return
4529 fold_unary_ignore_overflow_loc (loc, subcode,
4530 gimple_expr_type (stmt), op0);
4533 case GIMPLE_BINARY_RHS:
4535 /* Handle binary operators that can appear in GIMPLE form. */
4536 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4537 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4539 /* Translate &x + CST into an invariant form suitable for
4540 further propagation. */
4541 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
4542 && TREE_CODE (op0) == ADDR_EXPR
4543 && TREE_CODE (op1) == INTEGER_CST)
4545 tree off = fold_convert (ptr_type_node, op1)
4546 return build_fold_addr_expr_loc
4547 (loc,
4548 fold_build2 (MEM_REF,
4549 TREE_TYPE (TREE_TYPE (op0)),
4550 unshare_expr (op0), off));
4553 return fold_binary_loc (loc, subcode,
4554 gimple_expr_type (stmt), op0, op1);
4557 case GIMPLE_TERNARY_RHS:
4559 /* Handle ternary operators that can appear in GIMPLE form. */
4560 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4561 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4562 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
4564 /* Fold embedded expressions in ternary codes. */
4565 if ((subcode == COND_EXPR
4566 || subcode == VEC_COND_EXPR)
4567 && COMPARISON_CLASS_P (op0))
4569 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
4570 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
4571 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
4572 TREE_TYPE (op0), op00, op01);
4573 if (tem)
4574 op0 = tem;
4577 return fold_ternary_loc (loc, subcode,
4578 gimple_expr_type (stmt), op0, op1, op2);
4581 default:
4582 gcc_unreachable ();
4586 case GIMPLE_CALL:
4588 tree fn;
/* Internal function calls: the IFN_UBSAN_CHECK_* checks compute an
   arithmetic operation; fold to a constant when it provably does not
   overflow.  */
4590 if (gimple_call_internal_p (stmt))
4592 enum tree_code subcode = ERROR_MARK;
4593 switch (gimple_call_internal_fn (stmt))
4595 case IFN_UBSAN_CHECK_ADD:
4596 subcode = PLUS_EXPR;
4597 break;
4598 case IFN_UBSAN_CHECK_SUB:
4599 subcode = MINUS_EXPR;
4600 break;
4601 case IFN_UBSAN_CHECK_MUL:
4602 subcode = MULT_EXPR;
4603 break;
4604 default:
4605 return NULL_TREE;
4607 tree arg0 = gimple_call_arg (stmt, 0);
4608 tree arg1 = gimple_call_arg (stmt, 1);
4609 tree op0 = (*valueize) (arg0);
4610 tree op1 = (*valueize) (arg1);
/* Non-constant operands: a few algebraic identities still give a
   provably overflow-free constant result.  */
4612 if (TREE_CODE (op0) != INTEGER_CST
4613 || TREE_CODE (op1) != INTEGER_CST)
4615 switch (subcode)
4617 case MULT_EXPR:
4618 /* x * 0 = 0 * x = 0 without overflow. */
4619 if (integer_zerop (op0) || integer_zerop (op1))
4620 return build_zero_cst (TREE_TYPE (arg0));
4621 break;
4622 case MINUS_EXPR:
4623 /* y - y = 0 without overflow. */
4624 if (operand_equal_p (op0, op1, 0))
4625 return build_zero_cst (TREE_TYPE (arg0));
4626 break;
4627 default:
4628 break;
4631 tree res
4632 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
4633 if (res
4634 && TREE_CODE (res) == INTEGER_CST
4635 && !TREE_OVERFLOW (res))
4636 return res;
4637 return NULL_TREE;
/* Ordinary calls: try folding builtin calls whose callee and all
   arguments valueize to constants.  */
4640 fn = (*valueize) (gimple_call_fn (stmt));
4641 if (TREE_CODE (fn) == ADDR_EXPR
4642 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
4643 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
4644 && gimple_builtin_call_types_compatible_p (stmt,
4645 TREE_OPERAND (fn, 0)))
4647 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
4648 tree call, retval;
4649 unsigned i;
4650 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4651 args[i] = (*valueize) (gimple_call_arg (stmt, i));
4652 call = build_call_array_loc (loc,
4653 gimple_call_return_type (stmt),
4654 fn, gimple_call_num_args (stmt), args);
4655 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
4656 if (retval)
4658 /* fold_call_expr wraps the result inside a NOP_EXPR. */
4659 STRIP_NOPS (retval);
4660 retval = fold_convert (gimple_call_return_type (stmt), retval);
4662 return retval;
4664 return NULL_TREE;
4667 default:
4668 return NULL_TREE;
4672 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4673 Returns NULL_TREE if folding to a constant is not possible, otherwise
4674 returns a constant according to is_gimple_min_invariant. */
4676 tree
4677 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
4679 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
4680 if (res && is_gimple_min_invariant (res))
4681 return res;
4682 return NULL_TREE;
4686 /* The following set of functions are supposed to fold references using
4687 their constant initializers. */
4689 static tree fold_ctor_reference (tree type, tree ctor,
4690 unsigned HOST_WIDE_INT offset,
4691 unsigned HOST_WIDE_INT size, tree);
4693 /* See if we can find constructor defining value of BASE.
4694 When we know the constructor with constant offset (such as
4695 base is array[40] and we do know constructor of array), then
4696 BIT_OFFSET is adjusted accordingly.
4698 As a special case, return error_mark_node when constructor
4699 is not explicitly available, but it is known to be zero
4700 such as 'static const int a;'. */
4701 static tree
4702 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
4703 tree (*valueize)(tree))
4705 HOST_WIDE_INT bit_offset2, size, max_size;
/* Look through a MEM_REF: accumulate its constant offset (in bits) into
   *BIT_OFFSET and strip the ADDR_EXPR to reach the underlying decl.  */
4706 if (TREE_CODE (base) == MEM_REF)
4708 if (!integer_zerop (TREE_OPERAND (base, 1)))
4710 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
4711 return NULL_TREE;
4712 *bit_offset += (mem_ref_offset (base).to_short_addr ()
4713 * BITS_PER_UNIT);
4716 if (valueize
4717 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
4718 base = valueize (TREE_OPERAND (base, 0));
4719 if (!base || TREE_CODE (base) != ADDR_EXPR)
4720 return NULL_TREE;
4721 base = TREE_OPERAND (base, 0);
4724 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
4725 DECL_INITIAL. If BASE is a nested reference into another
4726 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
4727 the inner reference. */
4728 switch (TREE_CODE (base))
4730 case VAR_DECL:
4731 case CONST_DECL:
4733 tree init = ctor_for_folding (base);
4735 /* Our semantic is exact opposite of ctor_for_folding;
4736 NULL means unknown, while error_mark_node is 0. */
4737 if (init == error_mark_node)
4738 return NULL_TREE;
4739 if (!init)
4740 return error_mark_node;
4741 return init;
4744 case ARRAY_REF:
4745 case COMPONENT_REF:
/* Only exact (size == max_size) references can be resolved.  */
4746 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
4747 if (max_size == -1 || size != max_size)
4748 return NULL_TREE;
4749 *bit_offset += bit_offset2;
4750 return get_base_constructor (base, bit_offset, valueize);
4752 case STRING_CST:
4753 case CONSTRUCTOR:
4754 return base;
4756 default:
4757 return NULL_TREE;
4761 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
4762 SIZE to the memory at bit OFFSET. */
4764 static tree
4765 fold_array_ctor_reference (tree type, tree ctor,
4766 unsigned HOST_WIDE_INT offset,
4767 unsigned HOST_WIDE_INT size,
4768 tree from_decl)
4770 unsigned HOST_WIDE_INT cnt;
4771 tree cfield, cval;
4772 offset_int low_bound;
4773 offset_int elt_size;
4774 offset_int index, max_index;
4775 offset_int access_index;
4776 tree domain_type = NULL_TREE, index_type = NULL_TREE;
4777 HOST_WIDE_INT inner_offset;
4779 /* Compute low bound and elt size. */
4780 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
4781 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor))
4782 if (domain_type && TYPE_MIN_VALUE (domain_type))
4784 /* Static constructors for variably sized objects makes no sense. */
4785 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
4786 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
4787 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
4789 else
4790 low_bound = 0;
4791 /* Static constructors for variably sized objects makes no sense. */
4792 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
4793 == INTEGER_CST);
4794 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
4796 /* We can handle only constantly sized accesses that are known to not
4797 be larger than size of array element. */
4798 if (!TYPE_SIZE_UNIT (type)
4799 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
4800 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
4801 || elt_size == 0)
4802 return NULL_TREE;
4804 /* Compute the array index we look for. */
4805 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
4806 elt_size);
4807 access_index += low_bound;
4808 if (index_type)
4809 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
4810 TYPE_SIGN (index_type));
4812 /* And offset within the access. */
4813 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
4815 /* See if the array field is large enough to span whole access. We do not
4816 care to fold accesses spanning multiple array indexes. */
4817 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
4818 return NULL_TREE;
/* Start one below the low bound so that the first unindexed element
   (which bumps INDEX by one) lands on LOW_BOUND.  */
4820 index = low_bound - 1;
4821 if (index_type)
4822 index = wi::ext (index, TYPE_PRECISION (index_type),
4823 TYPE_SIGN (index_type));
/* Walk the constructor elements, tracking the [INDEX, MAX_INDEX] range
   each element covers, until the element containing ACCESS_INDEX.  */
4825 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
4827 /* Array constructor might explicitly set index, or specify range
4828 or leave index NULL meaning that it is next index after previous
4829 one. */
4830 if (cfield)
4832 if (TREE_CODE (cfield) == INTEGER_CST)
4833 max_index = index = wi::to_offset (cfield);
4834 else
4836 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
4837 index = wi::to_offset (TREE_OPERAND (cfield, 0));
4838 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
4841 else
4843 index += 1;
4844 if (index_type)
4845 index = wi::ext (index, TYPE_PRECISION (index_type),
4846 TYPE_SIGN (index_type));
4847 max_index = index;
4850 /* Do we have a match? */
4851 if (wi::cmpu (access_index, index) >= 0
4852 && (wi::cmpu (access_index, max_index) <= 0)
4853 return fold_ctor_reference (type, cval, inner_offset, size,
4854 from_decl);
4856 /* When memory is not explicitly mentioned in constructor,
4857 it is 0 (or out of range). */
4858 return build_zero_cst (type);
4861 /* CTOR is CONSTRUCTOR of an aggregate or vector.
4862 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
4864 static tree
4865 fold_nonarray_ctor_reference (tree type, tree ctor,
4866 unsigned HOST_WIDE_INT offset,
4867 unsigned HOST_WIDE_INT size,
4868 tree from_decl)
4870 unsigned HOST_WIDE_INT cnt;
4871 tree cfield, cval;
4873 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
4874 cval)
4876 tree byte_offset = DECL_FIELD_OFFSET (cfield);
4877 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
4878 tree field_size = DECL_SIZE (cfield);
4879 offset_int bitoffset;
4880 offset_int bitoffset_end, access_end;
4882 /* Variable sized objects in static constructors makes no sense,
4883 but field_size can be NULL for flexible array members. */
4884 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
4885 && TREE_CODE (byte_offset) == INTEGER_CST
4886 && (field_size != NULL_TREE
4887 ? TREE_CODE (field_size) == INTEGER_CST
4888 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
4890 /* Compute bit offset of the field. */
4891 bitoffset = (wi::to_offset (field_offset)
4892 + wi::lshift (wi::to_offset (byte_offset),
4893 LOG2_BITS_PER_UNIT));
4894 /* Compute bit offset where the field ends. */
4895 if (field_size != NULL_TREE)
4896 bitoffset_end = bitoffset + wi::to_offset (field_size);
4897 else
/* A zero BITOFFSET_END stands for "unbounded" (flexible array member);
   the overlap test below checks field_size == NULL_TREE instead.  */
4898 bitoffset_end = 0;
4900 access_end = offset_int (offset) + size;
4902 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
4903 [BITOFFSET, BITOFFSET_END)? */
4904 if (wi::cmps (access_end, bitoffset) > 0
4905 && (field_size == NULL_TREE
4906 || wi::lts_p (offset, bitoffset_end)))
4908 offset_int inner_offset = offset_int (offset) - bitoffset;
4909 /* We do have overlap. Now see if field is large enough to
4910 cover the access. Give up for accesses spanning multiple
4911 fields. */
4912 if (wi::cmps (access_end, bitoffset_end) > 0)
4913 return NULL_TREE;
4914 if (wi::lts_p (offset, bitoffset))
4915 return NULL_TREE;
4916 return fold_ctor_reference (type, cval,
4917 inner_offset.to_uhwi (), size,
4918 from_decl);
4921 /* When memory is not explicitly mentioned in constructor, it is 0. */
4922 return build_zero_cst (type);
4925 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
4926 to the memory at bit OFFSET. */
4928 static tree
4929 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
4930 unsigned HOST_WIDE_INT size, tree from_decl)
4932 tree ret;
4934 /* We found the field with exact match. */
4935 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
4936 && !offset)
4937 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
4939 /* We are at the end of walk, see if we can view convert the
4940 result. */
4941 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
4942 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
4943 && !compare_tree_int (TYPE_SIZE (type), size)
4944 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
4946 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
4947 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
4948 if (ret)
4949 STRIP_NOPS (ret);
4950 return ret;
4952 /* For constants and byte-aligned/sized reads try to go through
4953 native_encode/interpret. */
4954 if (CONSTANT_CLASS_P (ctor)
4955 && BITS_PER_UNIT == 8
4956 && offset % BITS_PER_UNIT == 0
4957 && size % BITS_PER_UNIT == 0
4958 && size <= MAX_BITSIZE_MODE_ANY_MODE)
4960 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
4961 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
4962 offset / BITS_PER_UNIT) > 0)
4963 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
/* Dispatch on the constructor's type: arrays and vectors are looked up
   by element position, other aggregates by field.  */
4965 if (TREE_CODE (ctor) == CONSTRUCTOR)
4968 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
4969 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
4970 return fold_array_ctor_reference (type, ctor, offset, size,
4971 from_decl);
4972 else
4973 return fold_nonarray_ctor_reference (type, ctor, offset, size,
4974 from_decl);
4977 return NULL_TREE;
4980 /* Return the tree representing the element referenced by T if T is an
4981 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
4982 names using VALUEIZE. Return NULL_TREE otherwise. */
4984 tree
4985 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
4987 tree ctor, idx, base;
4988 HOST_WIDE_INT offset, size, max_size;
4989 tree tem;
4991 if (TREE_THIS_VOLATILE (t))
4992 return NULL_TREE;
4994 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4995 return get_symbol_constant_value (t);
4997 tem = fold_read_from_constant_string (t);
4998 if (tem)
4999 return tem;
5001 switch (TREE_CODE (t))
5003 case ARRAY_REF:
5004 case ARRAY_RANGE_REF:
5005 /* Constant indexes are handled well by get_base_constructor.
5006 Only special case variable offsets.
5007 FIXME: This code can't handle nested references with variable indexes
5008 (they will be handled only by iteration of ccp). Perhaps we can bring
5009 get_ref_base_and_extent here and make it use a valueize callback. */
5010 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5011 && valueize
5012 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5013 && TREE_CODE (idx) == INTEGER_CST)
5015 tree low_bound, unit_size;
5017 /* If the resulting bit-offset is constant, track it. */
5018 if ((low_bound = array_ref_low_bound (t),
5019 TREE_CODE (low_bound) == INTEGER_CST)
5020 && (unit_size = array_ref_element_size (t),
5021 tree_fits_uhwi_p (unit_size)))
5023 offset_int woffset
5024 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5025 TYPE_PRECISION (TREE_TYPE (idx)));
5027 if (wi::fits_shwi_p (woffset))
5029 offset = woffset.to_shwi ();
5030 /* TODO: This code seems wrong, multiply then check
5031 to see if it fits. */
5032 offset *= tree_to_uhwi (unit_size);
5033 offset *= BITS_PER_UNIT;
5035 base = TREE_OPERAND (t, 0);
5036 ctor = get_base_constructor (base, &offset, valueize);
5037 /* Empty constructor. Always fold to 0. */
5038 if (ctor == error_mark_node)
5039 return build_zero_cst (TREE_TYPE (t));
5040 /* Out of bound array access. Value is undefined,
5041 but don't fold. */
5042 if (offset < 0)
5043 return NULL_TREE;
5044 /* We can not determine ctor. */
5045 if (!ctor)
5046 return NULL_TREE;
5047 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5048 tree_to_uhwi (unit_size)
5049 * BITS_PER_UNIT,
5050 base);
/* Variable-index case not resolved; fall through and treat the
   reference like the other handled reference codes.  */
5054 /* Fallthru. */
5056 case COMPONENT_REF:
5057 case BIT_FIELD_REF:
5058 case TARGET_MEM_REF:
5059 case MEM_REF:
5060 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
5061 ctor = get_base_constructor (base, &offset, valueize);
5063 /* Empty constructor. Always fold to 0. */
5064 if (ctor == error_mark_node)
5065 return build_zero_cst (TREE_TYPE (t));
5066 /* We do not know precise address. */
5067 if (max_size == -1 || max_size != size)
5068 return NULL_TREE;
5069 /* We can not determine ctor. */
5070 if (!ctor)
5071 return NULL_TREE;
5073 /* Out of bound array access. Value is undefined, but don't fold. */
5074 if (offset < 0)
5075 return NULL_TREE;
5077 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5078 base);
5080 case REALPART_EXPR:
5081 case IMAGPART_EXPR:
5083 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5084 if (c && TREE_CODE (c) == COMPLEX_CST)
5085 return fold_build1_loc (EXPR_LOCATION (t),
5086 TREE_CODE (t), TREE_TYPE (t), c);
5087 break;
5090 default:
5091 break;
5094 return NULL_TREE;
5097 tree
5098 fold_const_aggregate_ref (tree t)
5100 return fold_const_aggregate_ref_1 (t, NULL);
5103 /* Lookup virtual method with index TOKEN in a virtual table V
5104 at OFFSET.
5105 Set CAN_REFER if non-NULL to false if method
5106 is not referable or if the virtual table is ill-formed (such as rewritten
5107 by non-C++ produced symbol). Otherwise just return NULL in that case. */
5109 tree
5110 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5111 tree v,
5112 unsigned HOST_WIDE_INT offset,
5113 bool *can_refer)
5115 tree vtable = v, init, fn;
5116 unsigned HOST_WIDE_INT size;
5117 unsigned HOST_WIDE_INT elt_size, access_index;
5118 tree domain_type;
5120 if (can_refer)
5121 *can_refer = true;
5123 /* First of all double check we have virtual table. */
5124 if (TREE_CODE (v) != VAR_DECL
5125 || !DECL_VIRTUAL_P (v))
5127 gcc_assert (in_lto_p);
5128 /* Pass down that we lost track of the target. */
5129 if (can_refer)
5130 *can_refer = false;
5131 return NULL_TREE;
5134 init = ctor_for_folding (v);
5136 /* The virtual tables should always be born with constructors
5137 and we always should assume that they are available for
5138 folding. At the moment we do not stream them in all cases,
5139 but it should never happen that ctor seem unreachable. */
5140 gcc_assert (init);
5141 if (init == error_mark_node)
5143 gcc_assert (in_lto_p);
5144 /* Pass down that we lost track of the target. */
5145 if (can_refer)
5146 *can_refer = false;
5147 return NULL_TREE;
/* Convert OFFSET and TOKEN into a bit offset into the vtable array.  */
5149 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5150 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5151 offset *= BITS_PER_UNIT;
5152 offset += token * size;
5154 /* Lookup the value in the constructor that is assumed to be array.
5155 This is equivalent to
5156 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5157 offset, size, NULL);
5158 but in a constant time. We expect that frontend produced a simple
5159 array without indexed initializers. */
5161 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5162 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5163 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5164 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5166 access_index = offset / BITS_PER_UNIT / elt_size;
5167 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5169 /* This code makes an assumption that there are no
5170 indexed fields produced by C++ FE, so we can directly index the array. */
5171 if (access_index < CONSTRUCTOR_NELTS (init))
5173 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5174 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5175 STRIP_NOPS (fn);
5177 else
5178 fn = NULL;
5180 /* For type inconsistent program we may end up looking up virtual method
5181 in virtual table that does not contain TOKEN entries. We may overrun
5182 the virtual table and pick up a constant or RTTI info pointer.
5183 In any case the call is undefined. */
5184 if (!fn
5185 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5186 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5187 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5188 else
5190 fn = TREE_OPERAND (fn, 0);
5192 /* When cgraph node is missing and function is not public, we cannot
5193 devirtualize. This can happen in WHOPR when the actual method
5194 ends up in other partition, because we found devirtualization
5195 possibility too late. */
5196 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5198 if (can_refer)
5200 *can_refer = false;
5201 return fn;
5203 return NULL_TREE;
5207 /* Make sure we create a cgraph node for functions we'll reference.
5208 They can be non-existent if the reference comes from an entry
5209 of an external vtable for example. */
5210 cgraph_node::get_create (fn);
5212 return fn;
5215 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5216 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5217 KNOWN_BINFO carries the binfo describing the true type of
5218 OBJ_TYPE_REF_OBJECT(REF).
5219 Set CAN_REFER if non-NULL to false if method
5220 is not referable or if the virtual table is ill-formed (such as rewriten
5221 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5223 tree
5224 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5225 bool *can_refer)
5227 unsigned HOST_WIDE_INT offset;
5228 tree v;
5230 v = BINFO_VTABLE (known_binfo);
5231 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5232 if (!v)
5233 return NULL_TREE;
5235 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5237 if (can_refer)
5238 *can_refer = false;
5239 return NULL_TREE;
5241 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5244 /* Return true iff VAL is a gimple expression that is known to be
5245 non-negative. Restricted to floating-point inputs. */
5247 bool
5248 gimple_val_nonnegative_real_p (tree val)
5250 gimple def_stmt;
5252 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
5254 /* Use existing logic for non-gimple trees. */
5255 if (tree_expr_nonnegative_p (val))
5256 return true;
5258 if (TREE_CODE (val) != SSA_NAME)
5259 return false;
5261 /* Currently we look only at the immediately defining statement
5262 to make this determination, since recursion on defining
5263 statements of operands can lead to quadratic behavior in the
5264 worst case. This is expected to catch almost all occurrences
5265 in practice. It would be possible to implement limited-depth
5266 recursion if important cases are lost. Alternatively, passes
5267 that need this information (such as the pow/powi lowering code
5268 in the cse_sincos pass) could be revised to provide it through
5269 dataflow propagation. */
5271 def_stmt = SSA_NAME_DEF_STMT (val);
5273 if (is_gimple_assign (def_stmt))
5275 tree op0, op1;
5277 /* See fold-const.c:tree_expr_nonnegative_p for additional
5278 cases that could be handled with recursion. */
5280 switch (gimple_assign_rhs_code (def_stmt))
5282 case ABS_EXPR:
5283 /* Always true for floating-point operands. */
5284 return true;
5286 case MULT_EXPR:
5287 /* True if the two operands are identical (since we are
5288 restricted to floating-point inputs). */
5289 op0 = gimple_assign_rhs1 (def_stmt);
5290 op1 = gimple_assign_rhs2 (def_stmt);
5292 if (op0 == op1
5293 || operand_equal_p (op0, op1, 0))
5294 return true;
5296 default:
5297 return false;
5300 else if (is_gimple_call (def_stmt))
5302 tree fndecl = gimple_call_fndecl (def_stmt);
5303 if (fndecl
5304 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5306 tree arg1;
5308 switch (DECL_FUNCTION_CODE (fndecl))
5310 CASE_FLT_FN (BUILT_IN_ACOS):
5311 CASE_FLT_FN (BUILT_IN_ACOSH):
5312 CASE_FLT_FN (BUILT_IN_CABS):
5313 CASE_FLT_FN (BUILT_IN_COSH):
5314 CASE_FLT_FN (BUILT_IN_ERFC):
5315 CASE_FLT_FN (BUILT_IN_EXP):
5316 CASE_FLT_FN (BUILT_IN_EXP10):
5317 CASE_FLT_FN (BUILT_IN_EXP2):
5318 CASE_FLT_FN (BUILT_IN_FABS):
5319 CASE_FLT_FN (BUILT_IN_FDIM):
5320 CASE_FLT_FN (BUILT_IN_HYPOT):
5321 CASE_FLT_FN (BUILT_IN_POW10):
5322 return true;
5324 CASE_FLT_FN (BUILT_IN_SQRT):
5325 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
5326 nonnegative inputs. */
5327 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
5328 return true;
5330 break;
5332 CASE_FLT_FN (BUILT_IN_POWI):
5333 /* True if the second argument is an even integer. */
5334 arg1 = gimple_call_arg (def_stmt, 1);
5336 if (TREE_CODE (arg1) == INTEGER_CST
5337 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
5338 return true;
5340 break;
5342 CASE_FLT_FN (BUILT_IN_POW):
5343 /* True if the second argument is an even integer-valued
5344 real. */
5345 arg1 = gimple_call_arg (def_stmt, 1);
5347 if (TREE_CODE (arg1) == REAL_CST)
5349 REAL_VALUE_TYPE c;
5350 HOST_WIDE_INT n;
5352 c = TREE_REAL_CST (arg1);
5353 n = real_to_integer (&c);
5355 if ((n & 1) == 0)
5357 REAL_VALUE_TYPE cint;
5358 real_from_integer (&cint, VOIDmode, n, SIGNED);
5359 if (real_identical (&c, &cint))
5360 return true;
5364 break;
5366 default:
5367 return false;
5372 return false;
5375 /* Given a pointer value OP0, return a simplified version of an
5376 indirection through OP0, or NULL_TREE if no simplification is
5377 possible. Note that the resulting type may be different from
5378 the type pointed to in the sense that it is still compatible
5379 from the langhooks point of view. */
5381 tree
5382 gimple_fold_indirect_ref (tree t)
5384 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5385 tree sub = t;
5386 tree subtype;
5388 STRIP_NOPS (sub);
5389 subtype = TREE_TYPE (sub);
5390 if (!POINTER_TYPE_P (subtype))
5391 return NULL_TREE;
5393 if (TREE_CODE (sub) == ADDR_EXPR)
5395 tree op = TREE_OPERAND (sub, 0);
5396 tree optype = TREE_TYPE (op);
5397 /* *&p => p */
5398 if (useless_type_conversion_p (type, optype))
5399 return op;
5401 /* *(foo *)&fooarray => fooarray[0] */
5402 if (TREE_CODE (optype) == ARRAY_TYPE
5403 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5404 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5406 tree type_domain = TYPE_DOMAIN (optype);
5407 tree min_val = size_zero_node;
5408 if (type_domain && TYPE_MIN_VALUE (type_domain))
5409 min_val = TYPE_MIN_VALUE (type_domain);
5410 if (TREE_CODE (min_val) == INTEGER_CST)
5411 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5413 /* *(foo *)&complexfoo => __real__ complexfoo */
5414 else if (TREE_CODE (optype) == COMPLEX_TYPE
5415 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5416 return fold_build1 (REALPART_EXPR, type, op);
5417 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5418 else if (TREE_CODE (optype) == VECTOR_TYPE
5419 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5421 tree part_width = TYPE_SIZE (type);
5422 tree index = bitsize_int (0);
5423 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5427 /* *(p + CST) -> ... */
5428 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5429 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5431 tree addr = TREE_OPERAND (sub, 0);
5432 tree off = TREE_OPERAND (sub, 1);
5433 tree addrtype;
5435 STRIP_NOPS (addr);
5436 addrtype = TREE_TYPE (addr);
5438 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5439 if (TREE_CODE (addr) == ADDR_EXPR
5440 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5441 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5442 && tree_fits_uhwi_p (off))
5444 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5445 tree part_width = TYPE_SIZE (type);
5446 unsigned HOST_WIDE_INT part_widthi
5447 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5448 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5449 tree index = bitsize_int (indexi);
5450 if (offset / part_widthi
5451 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5452 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5453 part_width, index);
5456 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5457 if (TREE_CODE (addr) == ADDR_EXPR
5458 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5459 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5461 tree size = TYPE_SIZE_UNIT (type);
5462 if (tree_int_cst_equal (size, off))
5463 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5466 /* *(p + CST) -> MEM_REF <p, CST>. */
5467 if (TREE_CODE (addr) != ADDR_EXPR
5468 || DECL_P (TREE_OPERAND (addr, 0)))
5469 return fold_build2 (MEM_REF, type,
5470 addr,
5471 wide_int_to_tree (ptype, off));
5474 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5475 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5476 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5477 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5479 tree type_domain;
5480 tree min_val = size_zero_node;
5481 tree osub = sub;
5482 sub = gimple_fold_indirect_ref (sub);
5483 if (! sub)
5484 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5485 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5486 if (type_domain && TYPE_MIN_VALUE (type_domain))
5487 min_val = TYPE_MIN_VALUE (type_domain);
5488 if (TREE_CODE (min_val) == INTEGER_CST)
5489 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5492 return NULL_TREE;
5495 /* Return true if CODE is an operation that when operating on signed
5496 integer types involves undefined behavior on overflow and the
5497 operation can be expressed with unsigned arithmetic. */
5499 bool
5500 arith_code_with_undefined_signed_overflow (tree_code code)
5502 switch (code)
5504 case PLUS_EXPR:
5505 case MINUS_EXPR:
5506 case MULT_EXPR:
5507 case NEGATE_EXPR:
5508 case POINTER_PLUS_EXPR:
5509 return true;
5510 default:
5511 return false;
5515 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5516 operation that can be transformed to unsigned arithmetic by converting
5517 its operand, carrying out the operation in the corresponding unsigned
5518 type and converting the result back to the original type.
5520 Returns a sequence of statements that replace STMT and also contain
5521 a modified form of STMT itself. */
5523 gimple_seq
5524 rewrite_to_defined_overflow (gimple stmt)
5526 if (dump_file && (dump_flags & TDF_DETAILS))
5528 fprintf (dump_file, "rewriting stmt with undefined signed "
5529 "overflow ");
5530 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5533 tree lhs = gimple_assign_lhs (stmt);
5534 tree type = unsigned_type_for (TREE_TYPE (lhs));
5535 gimple_seq stmts = NULL;
5536 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
5538 gimple_seq stmts2 = NULL;
5539 gimple_set_op (stmt, i,
5540 force_gimple_operand (fold_convert (type,
5541 gimple_op (stmt, i)),
5542 &stmts2, true, NULL_TREE));
5543 gimple_seq_add_seq (&stmts, stmts2);
5545 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
5546 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
5547 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
5548 gimple_seq_add_stmt (&stmts, stmt);
5549 gimple cvt = gimple_build_assign_with_ops
5550 (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
5551 gimple_seq_add_stmt (&stmts, cvt);
5553 return stmts;
5557 /* Build the expression CODE OP0 of type TYPE with location LOC,
5558 simplifying it first if possible using VALUEIZE if not NULL.
5559 OP0 is expected to be valueized already. Returns the built
5560 expression value and appends statements possibly defining it
5561 to SEQ. */
5563 tree
5564 gimple_build (gimple_seq *seq, location_t loc,
5565 enum tree_code code, tree type, tree op0,
5566 tree (*valueize)(tree))
5568 tree res = gimple_simplify (code, type, op0, seq, valueize);
5569 if (!res)
5571 if (gimple_in_ssa_p (cfun))
5572 res = make_ssa_name (type, NULL);
5573 else
5574 res = create_tmp_reg (type, NULL);
5575 gimple stmt;
5576 if (code == REALPART_EXPR
5577 || code == IMAGPART_EXPR
5578 || code == VIEW_CONVERT_EXPR)
5579 stmt = gimple_build_assign_with_ops (code, res,
5580 build1 (code, type,
5581 op0), NULL_TREE);
5582 else
5583 stmt = gimple_build_assign_with_ops (code, res, op0, NULL_TREE);
5584 gimple_set_location (stmt, loc);
5585 gimple_seq_add_stmt_without_update (seq, stmt);
5587 return res;
5590 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
5591 simplifying it first if possible using VALUEIZE if not NULL.
5592 OP0 and OP1 are expected to be valueized already. Returns the built
5593 expression value and appends statements possibly defining it
5594 to SEQ. */
5596 tree
5597 gimple_build (gimple_seq *seq, location_t loc,
5598 enum tree_code code, tree type, tree op0, tree op1,
5599 tree (*valueize)(tree))
5601 tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
5602 if (!res)
5604 if (gimple_in_ssa_p (cfun))
5605 res = make_ssa_name (type, NULL);
5606 else
5607 res = create_tmp_reg (type, NULL);
5608 gimple stmt = gimple_build_assign_with_ops (code, res, op0, op1);
5609 gimple_set_location (stmt, loc);
5610 gimple_seq_add_stmt_without_update (seq, stmt);
5612 return res;
5615 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
5616 simplifying it first if possible using VALUEIZE if not NULL.
5617 OP0, OP1 and OP2 are expected to be valueized already. Returns the built
5618 expression value and appends statements possibly defining it
5619 to SEQ. */
5621 tree
5622 gimple_build (gimple_seq *seq, location_t loc,
5623 enum tree_code code, tree type, tree op0, tree op1, tree op2,
5624 tree (*valueize)(tree))
5626 tree res = gimple_simplify (code, type, op0, op1, op2,
5627 seq, valueize);
5628 if (!res)
5630 if (gimple_in_ssa_p (cfun))
5631 res = make_ssa_name (type, NULL);
5632 else
5633 res = create_tmp_reg (type, NULL);
5634 gimple stmt;
5635 if (code == BIT_FIELD_REF)
5636 stmt = gimple_build_assign_with_ops (code, res,
5637 build3 (BIT_FIELD_REF, type,
5638 op0, op1, op2),
5639 NULL_TREE);
5640 else
5641 stmt = gimple_build_assign_with_ops (code, res, op0, op1, op2);
5642 gimple_set_location (stmt, loc);
5643 gimple_seq_add_stmt_without_update (seq, stmt);
5645 return res;
5648 /* Build the call FN (ARG0) with a result of type TYPE
5649 (or no result if TYPE is void) with location LOC,
5650 simplifying it first if possible using VALUEIZE if not NULL.
5651 ARG0 is expected to be valueized already. Returns the built
5652 expression value (or NULL_TREE if TYPE is void) and appends
5653 statements possibly defining it to SEQ. */
5655 tree
5656 gimple_build (gimple_seq *seq, location_t loc,
5657 enum built_in_function fn, tree type, tree arg0,
5658 tree (*valueize)(tree))
5660 tree res = gimple_simplify (fn, type, arg0, seq, valueize);
5661 if (!res)
5663 tree decl = builtin_decl_implicit (fn);
5664 gimple stmt = gimple_build_call (decl, 1, arg0);
5665 if (!VOID_TYPE_P (type))
5667 if (gimple_in_ssa_p (cfun))
5668 res = make_ssa_name (type, NULL);
5669 else
5670 res = create_tmp_reg (type, NULL);
5671 gimple_call_set_lhs (stmt, res);
5673 gimple_set_location (stmt, loc);
5674 gimple_seq_add_stmt_without_update (seq, stmt);
5676 return res;
5679 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
5680 (or no result if TYPE is void) with location LOC,
5681 simplifying it first if possible using VALUEIZE if not NULL.
5682 ARG0 is expected to be valueized already. Returns the built
5683 expression value (or NULL_TREE if TYPE is void) and appends
5684 statements possibly defining it to SEQ. */
5686 tree
5687 gimple_build (gimple_seq *seq, location_t loc,
5688 enum built_in_function fn, tree type, tree arg0, tree arg1,
5689 tree (*valueize)(tree))
5691 tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
5692 if (!res)
5694 tree decl = builtin_decl_implicit (fn);
5695 gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
5696 if (!VOID_TYPE_P (type))
5698 if (gimple_in_ssa_p (cfun))
5699 res = make_ssa_name (type, NULL);
5700 else
5701 res = create_tmp_reg (type, NULL);
5702 gimple_call_set_lhs (stmt, res);
5704 gimple_set_location (stmt, loc);
5705 gimple_seq_add_stmt_without_update (seq, stmt);
5707 return res;
5710 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
5711 (or no result if TYPE is void) with location LOC,
5712 simplifying it first if possible using VALUEIZE if not NULL.
5713 ARG0 is expected to be valueized already. Returns the built
5714 expression value (or NULL_TREE if TYPE is void) and appends
5715 statements possibly defining it to SEQ. */
5717 tree
5718 gimple_build (gimple_seq *seq, location_t loc,
5719 enum built_in_function fn, tree type,
5720 tree arg0, tree arg1, tree arg2,
5721 tree (*valueize)(tree))
5723 tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
5724 if (!res)
5726 tree decl = builtin_decl_implicit (fn);
5727 gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
5728 if (!VOID_TYPE_P (type))
5730 if (gimple_in_ssa_p (cfun))
5731 res = make_ssa_name (type, NULL);
5732 else
5733 res = create_tmp_reg (type, NULL);
5734 gimple_call_set_lhs (stmt, res);
5736 gimple_set_location (stmt, loc);
5737 gimple_seq_add_stmt_without_update (seq, stmt);
5739 return res;
5742 /* Build the conversion (TYPE) OP with a result of type TYPE
5743 with location LOC if such conversion is neccesary in GIMPLE,
5744 simplifying it first.
5745 Returns the built expression value and appends
5746 statements possibly defining it to SEQ. */
5748 tree
5749 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
5751 if (useless_type_conversion_p (type, TREE_TYPE (op)))
5752 return op;
5753 return gimple_build (seq, loc, NOP_EXPR, type, op);