2014-08-14 Richard Biener <rguenther@suse.de>
[official-gcc.git] / gcc / gimple-fold.c
blob4fa1a3579c316a68a4c932a2c785dc510d2df2c8
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "stringpool.h"
27 #include "expr.h"
28 #include "stmt.h"
29 #include "stor-layout.h"
30 #include "flags.h"
31 #include "function.h"
32 #include "dumpfile.h"
33 #include "bitmap.h"
34 #include "basic-block.h"
35 #include "tree-ssa-alias.h"
36 #include "internal-fn.h"
37 #include "gimple-fold.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimple-ssa.h"
44 #include "tree-ssanames.h"
45 #include "tree-into-ssa.h"
46 #include "tree-dfa.h"
47 #include "tree-ssa.h"
48 #include "tree-ssa-propagate.h"
49 #include "target.h"
50 #include "ipa-utils.h"
51 #include "gimple-pretty-print.h"
52 #include "tree-ssa-address.h"
53 #include "langhooks.h"
54 #include "gimplify-me.h"
55 #include "dbgcnt.h"
56 #include "builtins.h"
57 #include "output.h"
59 /* Return true when DECL can be referenced from current unit.
60 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
61 We can get declarations that are not possible to reference for various
62 reasons:
64 1) When analyzing C++ virtual tables.
65 C++ virtual tables do have known constructors even
66 when they are keyed to other compilation unit.
67 Those tables can contain pointers to methods and vars
68 in other units. Those methods have both STATIC and EXTERNAL
69 set.
70 2) In WHOPR mode devirtualization might lead to reference
71 to method that was partitioned elsehwere.
72 In this case we have static VAR_DECL or FUNCTION_DECL
73 that has no corresponding callgraph/varpool node
74 declaring the body.
75 3) COMDAT functions referred by external vtables that
76 we devirtualize only during final compilation stage.
77 At this time we already decided that we will not output
78 the function body and thus we can't reference the symbol
79 directly. */
81 static bool
82 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
84 varpool_node *vnode;
85 struct cgraph_node *node;
86 symtab_node *snode;
88 if (DECL_ABSTRACT (decl))
89 return false;
91 /* We are concerned only about static/external vars and functions. */
92 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
93 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
94 return true;
96 /* Static objects can be referred only if they was not optimized out yet. */
97 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
99 /* Before we start optimizing unreachable code we can be sure all
100 static objects are defined. */
101 if (cgraph_function_flags_ready)
102 return true;
103 snode = symtab_node::get (decl);
104 if (!snode || !snode->definition)
105 return false;
106 node = dyn_cast <cgraph_node *> (snode);
107 return !node || !node->global.inlined_to;
110 /* We will later output the initializer, so we can refer to it.
111 So we are concerned only when DECL comes from initializer of
112 external var or var that has been optimized out. */
113 if (!from_decl
114 || TREE_CODE (from_decl) != VAR_DECL
115 || (!DECL_EXTERNAL (from_decl)
116 && (vnode = varpool_node::get (from_decl)) != NULL
117 && vnode->definition)
118 || (flag_ltrans
119 && (vnode = varpool_node::get (from_decl)) != NULL
120 && vnode->in_other_partition))
121 return true;
122 /* We are folding reference from external vtable. The vtable may reffer
123 to a symbol keyed to other compilation unit. The other compilation
124 unit may be in separate DSO and the symbol may be hidden. */
125 if (DECL_VISIBILITY_SPECIFIED (decl)
126 && DECL_EXTERNAL (decl)
127 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
128 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
129 return false;
130 /* When function is public, we always can introduce new reference.
131 Exception are the COMDAT functions where introducing a direct
132 reference imply need to include function body in the curren tunit. */
133 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
134 return true;
135 /* We have COMDAT. We are going to check if we still have definition
136 or if the definition is going to be output in other partition.
137 Bypass this when gimplifying; all needed functions will be produced.
139 As observed in PR20991 for already optimized out comdat virtual functions
140 it may be tempting to not necessarily give up because the copy will be
141 output elsewhere when corresponding vtable is output.
142 This is however not possible - ABI specify that COMDATs are output in
143 units where they are used and when the other unit was compiled with LTO
144 it is possible that vtable was kept public while the function itself
145 was privatized. */
146 if (!cgraph_function_flags_ready)
147 return true;
149 snode = symtab_node::get (decl);
150 if (!snode
151 || ((!snode->definition || DECL_EXTERNAL (decl))
152 && (!snode->in_other_partition
153 || (!snode->forced_by_abi && !snode->force_output))))
154 return false;
155 node = dyn_cast <cgraph_node *> (snode);
156 return !node || !node->global.inlined_to;
159 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
160 acceptable form for is_gimple_min_invariant.
161 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
163 tree
164 canonicalize_constructor_val (tree cval, tree from_decl)
166 tree orig_cval = cval;
167 STRIP_NOPS (cval);
168 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
169 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
171 tree ptr = TREE_OPERAND (cval, 0);
172 if (is_gimple_min_invariant (ptr))
173 cval = build1_loc (EXPR_LOCATION (cval),
174 ADDR_EXPR, TREE_TYPE (ptr),
175 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
176 ptr,
177 fold_convert (ptr_type_node,
178 TREE_OPERAND (cval, 1))));
180 if (TREE_CODE (cval) == ADDR_EXPR)
182 tree base = NULL_TREE;
183 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
185 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
186 if (base)
187 TREE_OPERAND (cval, 0) = base;
189 else
190 base = get_base_address (TREE_OPERAND (cval, 0));
191 if (!base)
192 return NULL_TREE;
194 if ((TREE_CODE (base) == VAR_DECL
195 || TREE_CODE (base) == FUNCTION_DECL)
196 && !can_refer_decl_in_current_unit_p (base, from_decl))
197 return NULL_TREE;
198 if (TREE_CODE (base) == VAR_DECL)
199 TREE_ADDRESSABLE (base) = 1;
200 else if (TREE_CODE (base) == FUNCTION_DECL)
202 /* Make sure we create a cgraph node for functions we'll reference.
203 They can be non-existent if the reference comes from an entry
204 of an external vtable for example. */
205 cgraph_node::get_create (base);
207 /* Fixup types in global initializers. */
208 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
209 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
211 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
212 cval = fold_convert (TREE_TYPE (orig_cval), cval);
213 return cval;
215 if (TREE_OVERFLOW_P (cval))
216 return drop_tree_overflow (cval);
217 return orig_cval;
220 /* If SYM is a constant variable with known value, return the value.
221 NULL_TREE is returned otherwise. */
223 tree
224 get_symbol_constant_value (tree sym)
226 tree val = ctor_for_folding (sym);
227 if (val != error_mark_node)
229 if (val)
231 val = canonicalize_constructor_val (unshare_expr (val), sym);
232 if (val && is_gimple_min_invariant (val))
233 return val;
234 else
235 return NULL_TREE;
237 /* Variables declared 'const' without an initializer
238 have zero as the initializer if they may not be
239 overridden at link or run time. */
240 if (!val
241 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
242 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
243 return build_zero_cst (TREE_TYPE (sym));
246 return NULL_TREE;
251 /* Subroutine of fold_stmt. We perform several simplifications of the
252 memory reference tree EXPR and make sure to re-gimplify them properly
253 after propagation of constant addresses. IS_LHS is true if the
254 reference is supposed to be an lvalue. */
256 static tree
257 maybe_fold_reference (tree expr, bool is_lhs)
259 tree *t = &expr;
260 tree result;
262 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
263 || TREE_CODE (expr) == REALPART_EXPR
264 || TREE_CODE (expr) == IMAGPART_EXPR)
265 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
266 return fold_unary_loc (EXPR_LOCATION (expr),
267 TREE_CODE (expr),
268 TREE_TYPE (expr),
269 TREE_OPERAND (expr, 0));
270 else if (TREE_CODE (expr) == BIT_FIELD_REF
271 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
272 return fold_ternary_loc (EXPR_LOCATION (expr),
273 TREE_CODE (expr),
274 TREE_TYPE (expr),
275 TREE_OPERAND (expr, 0),
276 TREE_OPERAND (expr, 1),
277 TREE_OPERAND (expr, 2));
279 while (handled_component_p (*t))
280 t = &TREE_OPERAND (*t, 0);
282 /* Canonicalize MEM_REFs invariant address operand. Do this first
283 to avoid feeding non-canonical MEM_REFs elsewhere. */
284 if (TREE_CODE (*t) == MEM_REF
285 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
287 bool volatile_p = TREE_THIS_VOLATILE (*t);
288 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
289 TREE_OPERAND (*t, 0),
290 TREE_OPERAND (*t, 1));
291 if (tem)
293 TREE_THIS_VOLATILE (tem) = volatile_p;
294 *t = tem;
295 tem = maybe_fold_reference (expr, is_lhs);
296 if (tem)
297 return tem;
298 return expr;
302 if (!is_lhs
303 && (result = fold_const_aggregate_ref (expr))
304 && is_gimple_min_invariant (result))
305 return result;
307 /* Fold back MEM_REFs to reference trees. */
308 if (TREE_CODE (*t) == MEM_REF
309 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
310 && integer_zerop (TREE_OPERAND (*t, 1))
311 && (TREE_THIS_VOLATILE (*t)
312 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
313 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
314 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
315 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
316 /* We have to look out here to not drop a required conversion
317 from the rhs to the lhs if is_lhs, but we don't have the
318 rhs here to verify that. Thus require strict type
319 compatibility. */
320 && types_compatible_p (TREE_TYPE (*t),
321 TREE_TYPE (TREE_OPERAND
322 (TREE_OPERAND (*t, 0), 0))))
324 tree tem;
325 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
326 tem = maybe_fold_reference (expr, is_lhs);
327 if (tem)
328 return tem;
329 return expr;
331 else if (TREE_CODE (*t) == TARGET_MEM_REF)
333 tree tem = maybe_fold_tmr (*t);
334 if (tem)
336 *t = tem;
337 tem = maybe_fold_reference (expr, is_lhs);
338 if (tem)
339 return tem;
340 return expr;
344 return NULL_TREE;
348 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
349 replacement rhs for the statement or NULL_TREE if no simplification
350 could be made. It is assumed that the operands have been previously
351 folded. */
353 static tree
354 fold_gimple_assign (gimple_stmt_iterator *si)
356 gimple stmt = gsi_stmt (*si);
357 enum tree_code subcode = gimple_assign_rhs_code (stmt);
358 location_t loc = gimple_location (stmt);
360 tree result = NULL_TREE;
362 switch (get_gimple_rhs_class (subcode))
364 case GIMPLE_SINGLE_RHS:
366 tree rhs = gimple_assign_rhs1 (stmt);
368 if (REFERENCE_CLASS_P (rhs))
369 return maybe_fold_reference (rhs, false);
371 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
373 tree val = OBJ_TYPE_REF_EXPR (rhs);
374 if (is_gimple_min_invariant (val))
375 return val;
376 else if (flag_devirtualize && virtual_method_call_p (rhs))
378 bool final;
379 vec <cgraph_node *>targets
380 = possible_polymorphic_call_targets (rhs, stmt, &final);
381 if (final && targets.length () <= 1 && dbg_cnt (devirt))
383 tree fndecl;
385 if (targets.length () == 1)
386 fndecl = targets[0]->decl;
387 else
388 fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
389 if (dump_enabled_p ())
391 location_t loc = gimple_location_safe (stmt);
392 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
393 "resolving virtual function address "
394 "reference to function %s\n",
395 targets.length () == 1
396 ? targets[0]->name ()
397 : "__builtin_unreachable");
399 val = fold_convert (TREE_TYPE (val),
400 build_fold_addr_expr_loc (loc, fndecl));
401 STRIP_USELESS_TYPE_CONVERSION (val);
402 return val;
407 else if (TREE_CODE (rhs) == ADDR_EXPR)
409 tree ref = TREE_OPERAND (rhs, 0);
410 tree tem = maybe_fold_reference (ref, true);
411 if (tem
412 && TREE_CODE (tem) == MEM_REF
413 && integer_zerop (TREE_OPERAND (tem, 1)))
414 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
415 else if (tem)
416 result = fold_convert (TREE_TYPE (rhs),
417 build_fold_addr_expr_loc (loc, tem));
418 else if (TREE_CODE (ref) == MEM_REF
419 && integer_zerop (TREE_OPERAND (ref, 1)))
420 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
423 else if (TREE_CODE (rhs) == CONSTRUCTOR
424 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
425 && (CONSTRUCTOR_NELTS (rhs)
426 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
428 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
429 unsigned i;
430 tree val;
432 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
433 if (TREE_CODE (val) != INTEGER_CST
434 && TREE_CODE (val) != REAL_CST
435 && TREE_CODE (val) != FIXED_CST)
436 return NULL_TREE;
438 return build_vector_from_ctor (TREE_TYPE (rhs),
439 CONSTRUCTOR_ELTS (rhs));
442 else if (DECL_P (rhs))
443 return get_symbol_constant_value (rhs);
445 /* If we couldn't fold the RHS, hand over to the generic
446 fold routines. */
447 if (result == NULL_TREE)
448 result = fold (rhs);
450 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
451 that may have been added by fold, and "useless" type
452 conversions that might now be apparent due to propagation. */
453 STRIP_USELESS_TYPE_CONVERSION (result);
455 if (result != rhs && valid_gimple_rhs_p (result))
456 return result;
458 return NULL_TREE;
460 break;
462 case GIMPLE_UNARY_RHS:
464 tree rhs = gimple_assign_rhs1 (stmt);
466 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
467 if (result)
469 /* If the operation was a conversion do _not_ mark a
470 resulting constant with TREE_OVERFLOW if the original
471 constant was not. These conversions have implementation
472 defined behavior and retaining the TREE_OVERFLOW flag
473 here would confuse later passes such as VRP. */
474 if (CONVERT_EXPR_CODE_P (subcode)
475 && TREE_CODE (result) == INTEGER_CST
476 && TREE_CODE (rhs) == INTEGER_CST)
477 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
479 STRIP_USELESS_TYPE_CONVERSION (result);
480 if (valid_gimple_rhs_p (result))
481 return result;
484 break;
486 case GIMPLE_BINARY_RHS:
487 /* Try to canonicalize for boolean-typed X the comparisons
488 X == 0, X == 1, X != 0, and X != 1. */
489 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
490 || gimple_assign_rhs_code (stmt) == NE_EXPR)
492 tree lhs = gimple_assign_lhs (stmt);
493 tree op1 = gimple_assign_rhs1 (stmt);
494 tree op2 = gimple_assign_rhs2 (stmt);
495 tree type = TREE_TYPE (op1);
497 /* Check whether the comparison operands are of the same boolean
498 type as the result type is.
499 Check that second operand is an integer-constant with value
500 one or zero. */
501 if (TREE_CODE (op2) == INTEGER_CST
502 && (integer_zerop (op2) || integer_onep (op2))
503 && useless_type_conversion_p (TREE_TYPE (lhs), type))
505 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
506 bool is_logical_not = false;
508 /* X == 0 and X != 1 is a logical-not.of X
509 X == 1 and X != 0 is X */
510 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
511 || (cmp_code == NE_EXPR && integer_onep (op2)))
512 is_logical_not = true;
514 if (is_logical_not == false)
515 result = op1;
516 /* Only for one-bit precision typed X the transformation
517 !X -> ~X is valied. */
518 else if (TYPE_PRECISION (type) == 1)
519 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
520 type, op1);
521 /* Otherwise we use !X -> X ^ 1. */
522 else
523 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
524 type, op1, build_int_cst (type, 1));
529 if (!result)
530 result = fold_binary_loc (loc, subcode,
531 TREE_TYPE (gimple_assign_lhs (stmt)),
532 gimple_assign_rhs1 (stmt),
533 gimple_assign_rhs2 (stmt));
535 if (result)
537 STRIP_USELESS_TYPE_CONVERSION (result);
538 if (valid_gimple_rhs_p (result))
539 return result;
541 break;
543 case GIMPLE_TERNARY_RHS:
544 /* Try to fold a conditional expression. */
545 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
547 tree op0 = gimple_assign_rhs1 (stmt);
548 tree tem;
549 bool set = false;
550 location_t cond_loc = gimple_location (stmt);
552 if (COMPARISON_CLASS_P (op0))
554 fold_defer_overflow_warnings ();
555 tem = fold_binary_loc (cond_loc,
556 TREE_CODE (op0), TREE_TYPE (op0),
557 TREE_OPERAND (op0, 0),
558 TREE_OPERAND (op0, 1));
559 /* This is actually a conditional expression, not a GIMPLE
560 conditional statement, however, the valid_gimple_rhs_p
561 test still applies. */
562 set = (tem && is_gimple_condexpr (tem)
563 && valid_gimple_rhs_p (tem));
564 fold_undefer_overflow_warnings (set, stmt, 0);
566 else if (is_gimple_min_invariant (op0))
568 tem = op0;
569 set = true;
571 else
572 return NULL_TREE;
574 if (set)
575 result = fold_build3_loc (cond_loc, COND_EXPR,
576 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
577 gimple_assign_rhs2 (stmt),
578 gimple_assign_rhs3 (stmt));
581 if (!result)
582 result = fold_ternary_loc (loc, subcode,
583 TREE_TYPE (gimple_assign_lhs (stmt)),
584 gimple_assign_rhs1 (stmt),
585 gimple_assign_rhs2 (stmt),
586 gimple_assign_rhs3 (stmt));
588 if (result)
590 STRIP_USELESS_TYPE_CONVERSION (result);
591 if (valid_gimple_rhs_p (result))
592 return result;
594 break;
596 case GIMPLE_INVALID_RHS:
597 gcc_unreachable ();
600 return NULL_TREE;
603 /* Attempt to fold a conditional statement. Return true if any changes were
604 made. We only attempt to fold the condition expression, and do not perform
605 any transformation that would require alteration of the cfg. It is
606 assumed that the operands have been previously folded. */
608 static bool
609 fold_gimple_cond (gimple stmt)
611 tree result = fold_binary_loc (gimple_location (stmt),
612 gimple_cond_code (stmt),
613 boolean_type_node,
614 gimple_cond_lhs (stmt),
615 gimple_cond_rhs (stmt));
617 if (result)
619 STRIP_USELESS_TYPE_CONVERSION (result);
620 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
622 gimple_cond_set_condition_from_tree (stmt, result);
623 return true;
627 return false;
631 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
632 adjusting the replacement stmts location and virtual operands.
633 If the statement has a lhs the last stmt in the sequence is expected
634 to assign to that lhs. */
636 static void
637 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
639 gimple stmt = gsi_stmt (*si_p);
641 if (gimple_has_location (stmt))
642 annotate_all_with_location (stmts, gimple_location (stmt));
644 /* First iterate over the replacement statements backward, assigning
645 virtual operands to their defining statements. */
646 gimple laststore = NULL;
647 for (gimple_stmt_iterator i = gsi_last (stmts);
648 !gsi_end_p (i); gsi_prev (&i))
650 gimple new_stmt = gsi_stmt (i);
651 if ((gimple_assign_single_p (new_stmt)
652 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
653 || (is_gimple_call (new_stmt)
654 && (gimple_call_flags (new_stmt)
655 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
657 tree vdef;
658 if (!laststore)
659 vdef = gimple_vdef (stmt);
660 else
661 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
662 gimple_set_vdef (new_stmt, vdef);
663 if (vdef && TREE_CODE (vdef) == SSA_NAME)
664 SSA_NAME_DEF_STMT (vdef) = new_stmt;
665 laststore = new_stmt;
669 /* Second iterate over the statements forward, assigning virtual
670 operands to their uses. */
671 tree reaching_vuse = gimple_vuse (stmt);
672 for (gimple_stmt_iterator i = gsi_start (stmts);
673 !gsi_end_p (i); gsi_next (&i))
675 gimple new_stmt = gsi_stmt (i);
676 /* If the new statement possibly has a VUSE, update it with exact SSA
677 name we know will reach this one. */
678 if (gimple_has_mem_ops (new_stmt))
679 gimple_set_vuse (new_stmt, reaching_vuse);
680 gimple_set_modified (new_stmt, true);
681 if (gimple_vdef (new_stmt))
682 reaching_vuse = gimple_vdef (new_stmt);
685 /* If the new sequence does not do a store release the virtual
686 definition of the original statement. */
687 if (reaching_vuse
688 && reaching_vuse == gimple_vuse (stmt))
690 tree vdef = gimple_vdef (stmt);
691 if (vdef
692 && TREE_CODE (vdef) == SSA_NAME)
694 unlink_stmt_vdef (stmt);
695 release_ssa_name (vdef);
699 /* Finally replace the original statement with the sequence. */
700 gsi_replace_with_seq (si_p, stmts, false);
703 /* Convert EXPR into a GIMPLE value suitable for substitution on the
704 RHS of an assignment. Insert the necessary statements before
705 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
706 is replaced. If the call is expected to produces a result, then it
707 is replaced by an assignment of the new RHS to the result variable.
708 If the result is to be ignored, then the call is replaced by a
709 GIMPLE_NOP. A proper VDEF chain is retained by making the first
710 VUSE and the last VDEF of the whole sequence be the same as the replaced
711 statement and using new SSA names for stores in between. */
713 void
714 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
716 tree lhs;
717 gimple stmt, new_stmt;
718 gimple_stmt_iterator i;
719 gimple_seq stmts = NULL;
721 stmt = gsi_stmt (*si_p);
723 gcc_assert (is_gimple_call (stmt));
725 push_gimplify_context (gimple_in_ssa_p (cfun));
727 lhs = gimple_call_lhs (stmt);
728 if (lhs == NULL_TREE)
730 gimplify_and_add (expr, &stmts);
731 /* We can end up with folding a memcpy of an empty class assignment
732 which gets optimized away by C++ gimplification. */
733 if (gimple_seq_empty_p (stmts))
735 pop_gimplify_context (NULL);
736 if (gimple_in_ssa_p (cfun))
738 unlink_stmt_vdef (stmt);
739 release_defs (stmt);
741 gsi_replace (si_p, gimple_build_nop (), true);
742 return;
745 else
747 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
748 new_stmt = gimple_build_assign (lhs, tmp);
749 i = gsi_last (stmts);
750 gsi_insert_after_without_update (&i, new_stmt,
751 GSI_CONTINUE_LINKING);
754 pop_gimplify_context (NULL);
756 gsi_replace_with_seq_vops (si_p, stmts);
760 /* Replace the call at *GSI with the gimple value VAL. */
762 static void
763 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
765 gimple stmt = gsi_stmt (*gsi);
766 tree lhs = gimple_call_lhs (stmt);
767 gimple repl;
768 if (lhs)
770 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
771 val = fold_convert (TREE_TYPE (lhs), val);
772 repl = gimple_build_assign (lhs, val);
774 else
775 repl = gimple_build_nop ();
776 tree vdef = gimple_vdef (stmt);
777 if (vdef && TREE_CODE (vdef) == SSA_NAME)
779 unlink_stmt_vdef (stmt);
780 release_ssa_name (vdef);
782 gsi_replace (gsi, repl, true);
785 /* Replace the call at *GSI with the new call REPL and fold that
786 again. */
788 static void
789 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
791 gimple stmt = gsi_stmt (*gsi);
792 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
793 gimple_set_location (repl, gimple_location (stmt));
794 if (gimple_vdef (stmt)
795 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
797 gimple_set_vdef (repl, gimple_vdef (stmt));
798 gimple_set_vuse (repl, gimple_vuse (stmt));
799 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
801 gsi_replace (gsi, repl, true);
802 fold_stmt (gsi);
805 /* Return true if VAR is a VAR_DECL or a component thereof. */
807 static bool
808 var_decl_component_p (tree var)
810 tree inner = var;
811 while (handled_component_p (inner))
812 inner = TREE_OPERAND (inner, 0);
813 return SSA_VAR_P (inner);
816 /* Fold function call to builtin mem{{,p}cpy,move}. Return
817 NULL_TREE if no simplification can be made.
818 If ENDP is 0, return DEST (like memcpy).
819 If ENDP is 1, return DEST+LEN (like mempcpy).
820 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
821 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
822 (memmove). */
824 static bool
825 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
826 tree dest, tree src, int endp)
828 gimple stmt = gsi_stmt (*gsi);
829 tree lhs = gimple_call_lhs (stmt);
830 tree len = gimple_call_arg (stmt, 2);
831 tree destvar, srcvar;
832 location_t loc = gimple_location (stmt);
834 /* If the LEN parameter is zero, return DEST. */
835 if (integer_zerop (len))
837 gimple repl;
838 if (gimple_call_lhs (stmt))
839 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
840 else
841 repl = gimple_build_nop ();
842 tree vdef = gimple_vdef (stmt);
843 if (vdef && TREE_CODE (vdef) == SSA_NAME)
845 unlink_stmt_vdef (stmt);
846 release_ssa_name (vdef);
848 gsi_replace (gsi, repl, true);
849 return true;
852 /* If SRC and DEST are the same (and not volatile), return
853 DEST{,+LEN,+LEN-1}. */
854 if (operand_equal_p (src, dest, 0))
856 unlink_stmt_vdef (stmt);
857 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
858 release_ssa_name (gimple_vdef (stmt));
859 if (!lhs)
861 gsi_replace (gsi, gimple_build_nop (), true);
862 return true;
864 goto done;
866 else
868 tree srctype, desttype;
869 unsigned int src_align, dest_align;
870 tree off0;
872 /* Build accesses at offset zero with a ref-all character type. */
873 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
874 ptr_mode, true), 0);
876 /* If we can perform the copy efficiently with first doing all loads
877 and then all stores inline it that way. Currently efficiently
878 means that we can load all the memory into a single integer
879 register which is what MOVE_MAX gives us. */
880 src_align = get_pointer_alignment (src);
881 dest_align = get_pointer_alignment (dest);
882 if (tree_fits_uhwi_p (len)
883 && compare_tree_int (len, MOVE_MAX) <= 0
884 /* ??? Don't transform copies from strings with known length this
885 confuses the tree-ssa-strlen.c. This doesn't handle
886 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
887 reason. */
888 && !c_strlen (src, 2))
890 unsigned ilen = tree_to_uhwi (len);
891 if (exact_log2 (ilen) != -1)
893 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
894 if (type
895 && TYPE_MODE (type) != BLKmode
896 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
897 == ilen * 8)
898 /* If the destination pointer is not aligned we must be able
899 to emit an unaligned store. */
900 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
901 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
903 tree srctype = type;
904 tree desttype = type;
905 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
906 srctype = build_aligned_type (type, src_align);
907 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
908 tree tem = fold_const_aggregate_ref (srcmem);
909 if (tem)
910 srcmem = tem;
911 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
912 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
913 src_align))
914 srcmem = NULL_TREE;
915 if (srcmem)
917 gimple new_stmt;
918 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
920 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
921 if (gimple_in_ssa_p (cfun))
922 srcmem = make_ssa_name (TREE_TYPE (srcmem),
923 new_stmt);
924 else
925 srcmem = create_tmp_reg (TREE_TYPE (srcmem),
926 NULL);
927 gimple_assign_set_lhs (new_stmt, srcmem);
928 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
929 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
931 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
932 desttype = build_aligned_type (type, dest_align);
933 new_stmt
934 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
935 dest, off0),
936 srcmem);
937 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
938 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
939 if (gimple_vdef (new_stmt)
940 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
941 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
942 if (!lhs)
944 gsi_replace (gsi, new_stmt, true);
945 return true;
947 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
948 goto done;
954 if (endp == 3)
956 /* Both DEST and SRC must be pointer types.
957 ??? This is what old code did. Is the testing for pointer types
958 really mandatory?
960 If either SRC is readonly or length is 1, we can use memcpy. */
961 if (!dest_align || !src_align)
962 return false;
963 if (readonly_data_expr (src)
964 || (tree_fits_uhwi_p (len)
965 && (MIN (src_align, dest_align) / BITS_PER_UNIT
966 >= tree_to_uhwi (len))))
968 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
969 if (!fn)
970 return false;
971 gimple_call_set_fndecl (stmt, fn);
972 gimple_call_set_arg (stmt, 0, dest);
973 gimple_call_set_arg (stmt, 1, src);
974 fold_stmt (gsi);
975 return true;
978 /* If *src and *dest can't overlap, optimize into memcpy as well. */
979 if (TREE_CODE (src) == ADDR_EXPR
980 && TREE_CODE (dest) == ADDR_EXPR)
982 tree src_base, dest_base, fn;
983 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
984 HOST_WIDE_INT size = -1;
985 HOST_WIDE_INT maxsize = -1;
987 srcvar = TREE_OPERAND (src, 0);
988 src_base = get_ref_base_and_extent (srcvar, &src_offset,
989 &size, &maxsize);
990 destvar = TREE_OPERAND (dest, 0);
991 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
992 &size, &maxsize);
993 if (tree_fits_uhwi_p (len))
994 maxsize = tree_to_uhwi (len);
995 else
996 maxsize = -1;
997 src_offset /= BITS_PER_UNIT;
998 dest_offset /= BITS_PER_UNIT;
999 if (SSA_VAR_P (src_base)
1000 && SSA_VAR_P (dest_base))
1002 if (operand_equal_p (src_base, dest_base, 0)
1003 && ranges_overlap_p (src_offset, maxsize,
1004 dest_offset, maxsize))
1005 return false;
1007 else if (TREE_CODE (src_base) == MEM_REF
1008 && TREE_CODE (dest_base) == MEM_REF)
1010 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
1011 TREE_OPERAND (dest_base, 0), 0))
1012 return false;
1013 offset_int off = mem_ref_offset (src_base) + src_offset;
1014 if (!wi::fits_shwi_p (off))
1015 return false;
1016 src_offset = off.to_shwi ();
1018 off = mem_ref_offset (dest_base) + dest_offset;
1019 if (!wi::fits_shwi_p (off))
1020 return false;
1021 dest_offset = off.to_shwi ();
1022 if (ranges_overlap_p (src_offset, maxsize,
1023 dest_offset, maxsize))
1024 return false;
1026 else
1027 return false;
1029 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1030 if (!fn)
1031 return false;
1032 gimple_call_set_fndecl (stmt, fn);
1033 gimple_call_set_arg (stmt, 0, dest);
1034 gimple_call_set_arg (stmt, 1, src);
1035 fold_stmt (gsi);
1036 return true;
1039 /* If the destination and source do not alias optimize into
1040 memcpy as well. */
1041 if ((is_gimple_min_invariant (dest)
1042 || TREE_CODE (dest) == SSA_NAME)
1043 && (is_gimple_min_invariant (src)
1044 || TREE_CODE (src) == SSA_NAME))
1046 ao_ref destr, srcr;
1047 ao_ref_init_from_ptr_and_size (&destr, dest, len);
1048 ao_ref_init_from_ptr_and_size (&srcr, src, len);
1049 if (!refs_may_alias_p_1 (&destr, &srcr, false))
1051 tree fn;
1052 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1053 if (!fn)
1054 return false;
1055 gimple_call_set_fndecl (stmt, fn);
1056 gimple_call_set_arg (stmt, 0, dest);
1057 gimple_call_set_arg (stmt, 1, src);
1058 fold_stmt (gsi);
1059 return true;
1063 return false;
1066 if (!tree_fits_shwi_p (len))
1067 return false;
1068 /* FIXME:
1069 This logic lose for arguments like (type *)malloc (sizeof (type)),
1070 since we strip the casts of up to VOID return value from malloc.
1071 Perhaps we ought to inherit type from non-VOID argument here? */
1072 STRIP_NOPS (src);
1073 STRIP_NOPS (dest);
1074 if (!POINTER_TYPE_P (TREE_TYPE (src))
1075 || !POINTER_TYPE_P (TREE_TYPE (dest)))
1076 return false;
1077 /* In the following try to find a type that is most natural to be
1078 used for the memcpy source and destination and that allows
1079 the most optimization when memcpy is turned into a plain assignment
1080 using that type. In theory we could always use a char[len] type
1081 but that only gains us that the destination and source possibly
1082 no longer will have their address taken. */
1083 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1084 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1086 tree tem = TREE_OPERAND (src, 0);
1087 STRIP_NOPS (tem);
1088 if (tem != TREE_OPERAND (src, 0))
1089 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1091 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1093 tree tem = TREE_OPERAND (dest, 0);
1094 STRIP_NOPS (tem);
1095 if (tem != TREE_OPERAND (dest, 0))
1096 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1098 srctype = TREE_TYPE (TREE_TYPE (src));
1099 if (TREE_CODE (srctype) == ARRAY_TYPE
1100 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1102 srctype = TREE_TYPE (srctype);
1103 STRIP_NOPS (src);
1104 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1106 desttype = TREE_TYPE (TREE_TYPE (dest));
1107 if (TREE_CODE (desttype) == ARRAY_TYPE
1108 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1110 desttype = TREE_TYPE (desttype);
1111 STRIP_NOPS (dest);
1112 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1114 if (TREE_ADDRESSABLE (srctype)
1115 || TREE_ADDRESSABLE (desttype))
1116 return false;
1118 /* Make sure we are not copying using a floating-point mode or
1119 a type whose size possibly does not match its precision. */
1120 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1121 || TREE_CODE (desttype) == BOOLEAN_TYPE
1122 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1123 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1124 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1125 || TREE_CODE (srctype) == BOOLEAN_TYPE
1126 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1127 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1128 if (!srctype)
1129 srctype = desttype;
1130 if (!desttype)
1131 desttype = srctype;
1132 if (!srctype)
1133 return false;
1135 src_align = get_pointer_alignment (src);
1136 dest_align = get_pointer_alignment (dest);
1137 if (dest_align < TYPE_ALIGN (desttype)
1138 || src_align < TYPE_ALIGN (srctype))
1139 return false;
1141 destvar = dest;
1142 STRIP_NOPS (destvar);
1143 if (TREE_CODE (destvar) == ADDR_EXPR
1144 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1145 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1146 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1147 else
1148 destvar = NULL_TREE;
1150 srcvar = src;
1151 STRIP_NOPS (srcvar);
1152 if (TREE_CODE (srcvar) == ADDR_EXPR
1153 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1154 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1156 if (!destvar
1157 || src_align >= TYPE_ALIGN (desttype))
1158 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1159 srcvar, off0);
1160 else if (!STRICT_ALIGNMENT)
1162 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1163 src_align);
1164 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1166 else
1167 srcvar = NULL_TREE;
1169 else
1170 srcvar = NULL_TREE;
1172 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1173 return false;
1175 if (srcvar == NULL_TREE)
1177 STRIP_NOPS (src);
1178 if (src_align >= TYPE_ALIGN (desttype))
1179 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1180 else
1182 if (STRICT_ALIGNMENT)
1183 return false;
1184 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1185 src_align);
1186 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1189 else if (destvar == NULL_TREE)
1191 STRIP_NOPS (dest);
1192 if (dest_align >= TYPE_ALIGN (srctype))
1193 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1194 else
1196 if (STRICT_ALIGNMENT)
1197 return false;
1198 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1199 dest_align);
1200 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1204 gimple new_stmt;
1205 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1207 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1208 if (gimple_in_ssa_p (cfun))
1209 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1210 else
1211 srcvar = create_tmp_reg (TREE_TYPE (srcvar), NULL);
1212 gimple_assign_set_lhs (new_stmt, srcvar);
1213 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1214 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1216 new_stmt = gimple_build_assign (destvar, srcvar);
1217 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1218 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1219 if (gimple_vdef (new_stmt)
1220 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1221 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1222 if (!lhs)
1224 gsi_replace (gsi, new_stmt, true);
1225 return true;
1227 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1230 done:
1231 if (endp == 0 || endp == 3)
1232 len = NULL_TREE;
1233 else if (endp == 2)
1234 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1235 ssize_int (1));
1236 if (endp == 2 || endp == 1)
1237 dest = fold_build_pointer_plus_loc (loc, dest, len);
1239 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1240 GSI_SAME_STMT);
1241 gimple repl = gimple_build_assign (lhs, dest);
1242 gsi_replace (gsi, repl, true);
1243 return true;
1246 /* Fold function call to builtin memset or bzero at *GSI setting the
1247 memory of size LEN to VAL. Return whether a simplification was made. */
1249 static bool
1250 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1252 gimple stmt = gsi_stmt (*gsi);
1253 tree etype;
1254 unsigned HOST_WIDE_INT length, cval;
1256 /* If the LEN parameter is zero, return DEST. */
1257 if (integer_zerop (len))
1259 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1260 return true;
1263 if (! tree_fits_uhwi_p (len))
1264 return false;
1266 if (TREE_CODE (c) != INTEGER_CST)
1267 return false;
1269 tree dest = gimple_call_arg (stmt, 0);
1270 tree var = dest;
1271 if (TREE_CODE (var) != ADDR_EXPR)
1272 return false;
1274 var = TREE_OPERAND (var, 0);
1275 if (TREE_THIS_VOLATILE (var))
1276 return false;
1278 etype = TREE_TYPE (var);
1279 if (TREE_CODE (etype) == ARRAY_TYPE)
1280 etype = TREE_TYPE (etype);
1282 if (!INTEGRAL_TYPE_P (etype)
1283 && !POINTER_TYPE_P (etype))
1284 return NULL_TREE;
1286 if (! var_decl_component_p (var))
1287 return NULL_TREE;
1289 length = tree_to_uhwi (len);
1290 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1291 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1292 return NULL_TREE;
1294 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1295 return NULL_TREE;
1297 if (integer_zerop (c))
1298 cval = 0;
1299 else
1301 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1302 return NULL_TREE;
1304 cval = TREE_INT_CST_LOW (c);
1305 cval &= 0xff;
1306 cval |= cval << 8;
1307 cval |= cval << 16;
1308 cval |= (cval << 31) << 1;
1311 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1312 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1313 gimple_set_vuse (store, gimple_vuse (stmt));
1314 tree vdef = gimple_vdef (stmt);
1315 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1317 gimple_set_vdef (store, gimple_vdef (stmt));
1318 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1320 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1321 if (gimple_call_lhs (stmt))
1323 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1324 gsi_replace (gsi, asgn, true);
1326 else
1328 gimple_stmt_iterator gsi2 = *gsi;
1329 gsi_prev (gsi);
1330 gsi_remove (&gsi2, true);
1333 return true;
1337 /* Return the string length, maximum string length or maximum value of
1338 ARG in LENGTH.
1339 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1340 is not NULL and, for TYPE == 0, its value is not equal to the length
1341 we determine or if we are unable to determine the length or value,
1342 return false. VISITED is a bitmap of visited variables.
1343 TYPE is 0 if string length should be returned, 1 for maximum string
1344 length and 2 for maximum value ARG can have. */
1346 static bool
1347 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
1349 tree var, val;
1350 gimple def_stmt;
1352 if (TREE_CODE (arg) != SSA_NAME)
1354 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1355 if (TREE_CODE (arg) == ADDR_EXPR
1356 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1357 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1359 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1360 if (TREE_CODE (aop0) == INDIRECT_REF
1361 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1362 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1363 length, visited, type);
1366 if (type == 2)
1368 val = arg;
1369 if (TREE_CODE (val) != INTEGER_CST
1370 || tree_int_cst_sgn (val) < 0)
1371 return false;
1373 else
1374 val = c_strlen (arg, 1);
1375 if (!val)
1376 return false;
1378 if (*length)
1380 if (type > 0)
1382 if (TREE_CODE (*length) != INTEGER_CST
1383 || TREE_CODE (val) != INTEGER_CST)
1384 return false;
1386 if (tree_int_cst_lt (*length, val))
1387 *length = val;
1388 return true;
1390 else if (simple_cst_equal (val, *length) != 1)
1391 return false;
1394 *length = val;
1395 return true;
1398 /* If ARG is registered for SSA update we cannot look at its defining
1399 statement. */
1400 if (name_registered_for_update_p (arg))
1401 return false;
1403 /* If we were already here, break the infinite cycle. */
1404 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1405 return true;
1407 var = arg;
1408 def_stmt = SSA_NAME_DEF_STMT (var);
1410 switch (gimple_code (def_stmt))
1412 case GIMPLE_ASSIGN:
1413 /* The RHS of the statement defining VAR must either have a
1414 constant length or come from another SSA_NAME with a constant
1415 length. */
1416 if (gimple_assign_single_p (def_stmt)
1417 || gimple_assign_unary_nop_p (def_stmt))
1419 tree rhs = gimple_assign_rhs1 (def_stmt);
1420 return get_maxval_strlen (rhs, length, visited, type);
1422 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1424 tree op2 = gimple_assign_rhs2 (def_stmt);
1425 tree op3 = gimple_assign_rhs3 (def_stmt);
1426 return get_maxval_strlen (op2, length, visited, type)
1427 && get_maxval_strlen (op3, length, visited, type);
1429 return false;
1431 case GIMPLE_PHI:
1433 /* All the arguments of the PHI node must have the same constant
1434 length. */
1435 unsigned i;
1437 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1439 tree arg = gimple_phi_arg (def_stmt, i)->def;
1441 /* If this PHI has itself as an argument, we cannot
1442 determine the string length of this argument. However,
1443 if we can find a constant string length for the other
1444 PHI args then we can still be sure that this is a
1445 constant string length. So be optimistic and just
1446 continue with the next argument. */
1447 if (arg == gimple_phi_result (def_stmt))
1448 continue;
1450 if (!get_maxval_strlen (arg, length, visited, type))
1451 return false;
1454 return true;
1456 default:
1457 return false;
1462 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1463 If LEN is not NULL, it represents the length of the string to be
1464 copied. Return NULL_TREE if no simplification can be made. */
1466 static bool
1467 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1468 location_t loc, tree dest, tree src, tree len)
1470 tree fn;
1472 /* If SRC and DEST are the same (and not volatile), return DEST. */
1473 if (operand_equal_p (src, dest, 0))
1475 replace_call_with_value (gsi, dest);
1476 return true;
1479 if (optimize_function_for_size_p (cfun))
1480 return false;
1482 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1483 if (!fn)
1484 return false;
1486 if (!len)
1488 len = c_strlen (src, 1);
1489 if (! len || TREE_SIDE_EFFECTS (len))
1490 return NULL_TREE;
1493 len = fold_convert_loc (loc, size_type_node, len);
1494 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1495 len = force_gimple_operand_gsi (gsi, len, true,
1496 NULL_TREE, true, GSI_SAME_STMT);
1497 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1498 replace_call_with_call_and_fold (gsi, repl);
1499 return true;
1502 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1503 If SLEN is not NULL, it represents the length of the source string.
1504 Return NULL_TREE if no simplification can be made. */
1506 static bool
1507 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi, location_t loc,
1508 tree dest, tree src, tree len, tree slen)
1510 tree fn;
1512 /* If the LEN parameter is zero, return DEST. */
1513 if (integer_zerop (len))
1515 replace_call_with_value (gsi, dest);
1516 return true;
1519 /* We can't compare slen with len as constants below if len is not a
1520 constant. */
1521 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
1522 return false;
1524 if (!slen)
1525 slen = c_strlen (src, 1);
1527 /* Now, we must be passed a constant src ptr parameter. */
1528 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
1529 return false;
1531 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1533 /* We do not support simplification of this case, though we do
1534 support it when expanding trees into RTL. */
1535 /* FIXME: generate a call to __builtin_memset. */
1536 if (tree_int_cst_lt (slen, len))
1537 return false;
1539 /* OK transform into builtin memcpy. */
1540 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1541 if (!fn)
1542 return false;
1544 len = fold_convert_loc (loc, size_type_node, len);
1545 len = force_gimple_operand_gsi (gsi, len, true,
1546 NULL_TREE, true, GSI_SAME_STMT);
1547 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1548 replace_call_with_call_and_fold (gsi, repl);
1549 return true;
1552 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1553 to the call.
1555 Return NULL_TREE if no simplification was possible, otherwise return the
1556 simplified form of the call as a tree.
1558 The simplified form may be a constant or other expression which
1559 computes the same value, but in a more efficient manner (including
1560 calls to other builtin functions).
1562 The call may contain arguments which need to be evaluated, but
1563 which are not useful to determine the result of the call. In
1564 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1565 COMPOUND_EXPR will be an argument which must be evaluated.
1566 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1567 COMPOUND_EXPR in the chain will contain the tree for the simplified
1568 form of the builtin function call. */
1570 static bool
1571 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi,
1572 location_t loc ATTRIBUTE_UNUSED, tree dst, tree src,
1573 tree len)
1575 gimple stmt = gsi_stmt (*gsi);
1577 const char *p = c_getstr (src);
1579 /* If the string length is zero, return the dst parameter. */
1580 if (p && *p == '\0')
1582 replace_call_with_value (gsi, dst);
1583 return true;
1586 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1587 return false;
1589 /* See if we can store by pieces into (dst + strlen(dst)). */
1590 tree newdst;
1591 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1592 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1594 if (!strlen_fn || !memcpy_fn)
1595 return false;
1597 /* If the length of the source string isn't computable don't
1598 split strcat into strlen and memcpy. */
1599 if (! len)
1600 len = c_strlen (src, 1);
1601 if (! len || TREE_SIDE_EFFECTS (len))
1602 return false;
1604 /* Create strlen (dst). */
1605 gimple_seq stmts = NULL, stmts2;
1606 gimple repl = gimple_build_call (strlen_fn, 1, dst);
1607 gimple_set_location (repl, loc);
1608 if (gimple_in_ssa_p (cfun))
1609 newdst = make_ssa_name (size_type_node, NULL);
1610 else
1611 newdst = create_tmp_reg (size_type_node, NULL);
1612 gimple_call_set_lhs (repl, newdst);
1613 gimple_seq_add_stmt_without_update (&stmts, repl);
1615 /* Create (dst p+ strlen (dst)). */
1616 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1617 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1618 gimple_seq_add_seq_without_update (&stmts, stmts2);
1620 len = fold_convert_loc (loc, size_type_node, len);
1621 len = size_binop_loc (loc, PLUS_EXPR, len,
1622 build_int_cst (size_type_node, 1));
1623 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1624 gimple_seq_add_seq_without_update (&stmts, stmts2);
1626 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1627 gimple_seq_add_stmt_without_update (&stmts, repl);
1628 if (gimple_call_lhs (stmt))
1630 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1631 gimple_seq_add_stmt_without_update (&stmts, repl);
1632 gsi_replace_with_seq_vops (gsi, stmts);
1633 /* gsi now points at the assignment to the lhs, get a
1634 stmt iterator to the memcpy call.
1635 ??? We can't use gsi_for_stmt as that doesn't work when the
1636 CFG isn't built yet. */
1637 gimple_stmt_iterator gsi2 = *gsi;
1638 gsi_prev (&gsi2);
1639 fold_stmt (&gsi2);
1641 else
1643 gsi_replace_with_seq_vops (gsi, stmts);
1644 fold_stmt (gsi);
1646 return true;
1649 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1650 to the call. IGNORE is true if the value returned
1651 by the builtin will be ignored. UNLOCKED is true is true if this
1652 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1653 the known length of the string. Return NULL_TREE if no simplification
1654 was possible. */
1656 static bool
1657 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1658 location_t loc ATTRIBUTE_UNUSED,
1659 tree arg0, tree arg1,
1660 bool ignore, bool unlocked, tree len)
1662 /* If we're using an unlocked function, assume the other unlocked
1663 functions exist explicitly. */
1664 tree const fn_fputc = (unlocked
1665 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1666 : builtin_decl_implicit (BUILT_IN_FPUTC));
1667 tree const fn_fwrite = (unlocked
1668 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1669 : builtin_decl_implicit (BUILT_IN_FWRITE));
1671 /* If the return value is used, don't do the transformation. */
1672 if (!ignore)
1673 return false;
1675 if (! len)
1676 len = c_strlen (arg0, 0);
1678 /* Get the length of the string passed to fputs. If the length
1679 can't be determined, punt. */
1680 if (!len
1681 || TREE_CODE (len) != INTEGER_CST)
1682 return false;
1684 switch (compare_tree_int (len, 1))
1686 case -1: /* length is 0, delete the call entirely . */
1687 replace_call_with_value (gsi, integer_zero_node);
1688 return true;
1690 case 0: /* length is 1, call fputc. */
1692 const char *p = c_getstr (arg0);
1693 if (p != NULL)
1695 if (!fn_fputc)
1696 return false;
1698 gimple repl = gimple_build_call (fn_fputc, 2,
1699 build_int_cst
1700 (integer_type_node, p[0]), arg1);
1701 replace_call_with_call_and_fold (gsi, repl);
1702 return true;
1705 /* FALLTHROUGH */
1706 case 1: /* length is greater than 1, call fwrite. */
1708 /* If optimizing for size keep fputs. */
1709 if (optimize_function_for_size_p (cfun))
1710 return false;
1711 /* New argument list transforming fputs(string, stream) to
1712 fwrite(string, 1, len, stream). */
1713 if (!fn_fwrite)
1714 return false;
1716 gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
1717 size_one_node, len, arg1);
1718 replace_call_with_call_and_fold (gsi, repl);
1719 return true;
1721 default:
1722 gcc_unreachable ();
1724 return false;
1727 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1728 DEST, SRC, LEN, and SIZE are the arguments to the call.
1729 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1730 code of the builtin. If MAXLEN is not NULL, it is maximum length
1731 passed as third argument. */
1733 static bool
1734 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1735 location_t loc,
1736 tree dest, tree src, tree len, tree size,
1737 tree maxlen, bool ignore,
1738 enum built_in_function fcode)
1740 tree fn;
1742 /* If SRC and DEST are the same (and not volatile), return DEST
1743 (resp. DEST+LEN for __mempcpy_chk). */
1744 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1746 if (fcode != BUILT_IN_MEMPCPY_CHK)
1748 replace_call_with_value (gsi, dest);
1749 return true;
1751 else
1753 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
1754 temp = force_gimple_operand_gsi (gsi, temp,
1755 false, NULL_TREE, true,
1756 GSI_SAME_STMT);
1757 replace_call_with_value (gsi, temp);
1758 return true;
1762 if (! tree_fits_uhwi_p (size))
1763 return false;
1765 if (! integer_all_onesp (size))
1767 if (! tree_fits_uhwi_p (len))
1769 /* If LEN is not constant, try MAXLEN too.
1770 For MAXLEN only allow optimizing into non-_ocs function
1771 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1772 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1774 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1776 /* (void) __mempcpy_chk () can be optimized into
1777 (void) __memcpy_chk (). */
1778 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1779 if (!fn)
1780 return false;
1782 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1783 replace_call_with_call_and_fold (gsi, repl);
1784 return true;
1786 return false;
1789 else
1790 maxlen = len;
1792 if (tree_int_cst_lt (size, maxlen))
1793 return false;
1796 fn = NULL_TREE;
1797 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1798 mem{cpy,pcpy,move,set} is available. */
1799 switch (fcode)
1801 case BUILT_IN_MEMCPY_CHK:
1802 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1803 break;
1804 case BUILT_IN_MEMPCPY_CHK:
1805 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1806 break;
1807 case BUILT_IN_MEMMOVE_CHK:
1808 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1809 break;
1810 case BUILT_IN_MEMSET_CHK:
1811 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1812 break;
1813 default:
1814 break;
1817 if (!fn)
1818 return false;
1820 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1821 replace_call_with_call_and_fold (gsi, repl);
1822 return true;
1825 /* Fold a call to the __st[rp]cpy_chk builtin.
1826 DEST, SRC, and SIZE are the arguments to the call.
1827 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1828 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1829 strings passed as second argument. */
1831 static bool
1832 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1833 location_t loc, tree dest,
1834 tree src, tree size,
1835 tree maxlen, bool ignore,
1836 enum built_in_function fcode)
1838 tree len, fn;
1840 /* If SRC and DEST are the same (and not volatile), return DEST. */
1841 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1843 replace_call_with_value (gsi, dest);
1844 return true;
1847 if (! tree_fits_uhwi_p (size))
1848 return false;
1850 if (! integer_all_onesp (size))
1852 len = c_strlen (src, 1);
1853 if (! len || ! tree_fits_uhwi_p (len))
1855 /* If LEN is not constant, try MAXLEN too.
1856 For MAXLEN only allow optimizing into non-_ocs function
1857 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1858 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1860 if (fcode == BUILT_IN_STPCPY_CHK)
1862 if (! ignore)
1863 return false;
1865 /* If return value of __stpcpy_chk is ignored,
1866 optimize into __strcpy_chk. */
1867 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1868 if (!fn)
1869 return false;
1871 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1872 replace_call_with_call_and_fold (gsi, repl);
1873 return true;
1876 if (! len || TREE_SIDE_EFFECTS (len))
1877 return false;
1879 /* If c_strlen returned something, but not a constant,
1880 transform __strcpy_chk into __memcpy_chk. */
1881 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1882 if (!fn)
1883 return false;
1885 len = fold_convert_loc (loc, size_type_node, len);
1886 len = size_binop_loc (loc, PLUS_EXPR, len,
1887 build_int_cst (size_type_node, 1));
1888 len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
1889 true, GSI_SAME_STMT);
1890 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1891 replace_call_with_call_and_fold (gsi, repl);
1892 return true;
1895 else
1896 maxlen = len;
1898 if (! tree_int_cst_lt (maxlen, size))
1899 return false;
1902 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1903 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1904 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1905 if (!fn)
1906 return false;
1908 gimple repl = gimple_build_call (fn, 2, dest, src);
1909 replace_call_with_call_and_fold (gsi, repl);
1910 return true;
1913 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1914 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1915 length passed as third argument. IGNORE is true if return value can be
1916 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1918 static bool
1919 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1920 tree dest, tree src,
1921 tree len, tree size, tree maxlen, bool ignore,
1922 enum built_in_function fcode)
1924 tree fn;
1926 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1928 /* If return value of __stpncpy_chk is ignored,
1929 optimize into __strncpy_chk. */
1930 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1931 if (fn)
1933 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1934 replace_call_with_call_and_fold (gsi, repl);
1935 return true;
1939 if (! tree_fits_uhwi_p (size))
1940 return false;
1942 if (! integer_all_onesp (size))
1944 if (! tree_fits_uhwi_p (len))
1946 /* If LEN is not constant, try MAXLEN too.
1947 For MAXLEN only allow optimizing into non-_ocs function
1948 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1949 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1950 return false;
1952 else
1953 maxlen = len;
1955 if (tree_int_cst_lt (size, maxlen))
1956 return false;
1959 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1960 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
1961 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
1962 if (!fn)
1963 return false;
1965 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1966 replace_call_with_call_and_fold (gsi, repl);
1967 return true;
1970 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
1971 NULL_TREE if a normal call should be emitted rather than expanding
1972 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
1973 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
1974 passed as second argument. */
1976 static bool
1977 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
1978 tree maxlen, enum built_in_function fcode)
1980 gimple stmt = gsi_stmt (*gsi);
1981 tree dest, size, len, fn, fmt, flag;
1982 const char *fmt_str;
1984 /* Verify the required arguments in the original call. */
1985 if (gimple_call_num_args (stmt) < 5)
1986 return false;
1988 dest = gimple_call_arg (stmt, 0);
1989 len = gimple_call_arg (stmt, 1);
1990 flag = gimple_call_arg (stmt, 2);
1991 size = gimple_call_arg (stmt, 3);
1992 fmt = gimple_call_arg (stmt, 4);
1994 if (! tree_fits_uhwi_p (size))
1995 return false;
1997 if (! integer_all_onesp (size))
1999 if (! tree_fits_uhwi_p (len))
2001 /* If LEN is not constant, try MAXLEN too.
2002 For MAXLEN only allow optimizing into non-_ocs function
2003 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2004 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2005 return false;
2007 else
2008 maxlen = len;
2010 if (tree_int_cst_lt (size, maxlen))
2011 return false;
2014 if (!init_target_chars ())
2015 return false;
2017 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2018 or if format doesn't contain % chars or is "%s". */
2019 if (! integer_zerop (flag))
2021 fmt_str = c_getstr (fmt);
2022 if (fmt_str == NULL)
2023 return false;
2024 if (strchr (fmt_str, target_percent) != NULL
2025 && strcmp (fmt_str, target_percent_s))
2026 return false;
2029 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2030 available. */
2031 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2032 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2033 if (!fn)
2034 return false;
2036 /* Replace the called function and the first 5 argument by 3 retaining
2037 trailing varargs. */
2038 gimple_call_set_fndecl (stmt, fn);
2039 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2040 gimple_call_set_arg (stmt, 0, dest);
2041 gimple_call_set_arg (stmt, 1, len);
2042 gimple_call_set_arg (stmt, 2, fmt);
2043 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2044 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2045 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2046 fold_stmt (gsi);
2047 return true;
2050 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2051 Return NULL_TREE if a normal call should be emitted rather than
2052 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2053 or BUILT_IN_VSPRINTF_CHK. */
2055 static bool
2056 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2057 enum built_in_function fcode)
2059 gimple stmt = gsi_stmt (*gsi);
2060 tree dest, size, len, fn, fmt, flag;
2061 const char *fmt_str;
2062 unsigned nargs = gimple_call_num_args (stmt);
2064 /* Verify the required arguments in the original call. */
2065 if (nargs < 4)
2066 return false;
2067 dest = gimple_call_arg (stmt, 0);
2068 flag = gimple_call_arg (stmt, 1);
2069 size = gimple_call_arg (stmt, 2);
2070 fmt = gimple_call_arg (stmt, 3);
2072 if (! tree_fits_uhwi_p (size))
2073 return false;
2075 len = NULL_TREE;
2077 if (!init_target_chars ())
2078 return false;
2080 /* Check whether the format is a literal string constant. */
2081 fmt_str = c_getstr (fmt);
2082 if (fmt_str != NULL)
2084 /* If the format doesn't contain % args or %%, we know the size. */
2085 if (strchr (fmt_str, target_percent) == 0)
2087 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2088 len = build_int_cstu (size_type_node, strlen (fmt_str));
2090 /* If the format is "%s" and first ... argument is a string literal,
2091 we know the size too. */
2092 else if (fcode == BUILT_IN_SPRINTF_CHK
2093 && strcmp (fmt_str, target_percent_s) == 0)
2095 tree arg;
2097 if (nargs == 5)
2099 arg = gimple_call_arg (stmt, 4);
2100 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2102 len = c_strlen (arg, 1);
2103 if (! len || ! tree_fits_uhwi_p (len))
2104 len = NULL_TREE;
2110 if (! integer_all_onesp (size))
2112 if (! len || ! tree_int_cst_lt (len, size))
2113 return false;
2116 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2117 or if format doesn't contain % chars or is "%s". */
2118 if (! integer_zerop (flag))
2120 if (fmt_str == NULL)
2121 return false;
2122 if (strchr (fmt_str, target_percent) != NULL
2123 && strcmp (fmt_str, target_percent_s))
2124 return false;
2127 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2128 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2129 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2130 if (!fn)
2131 return false;
2133 /* Replace the called function and the first 4 argument by 2 retaining
2134 trailing varargs. */
2135 gimple_call_set_fndecl (stmt, fn);
2136 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2137 gimple_call_set_arg (stmt, 0, dest);
2138 gimple_call_set_arg (stmt, 1, fmt);
2139 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2140 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2141 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2142 fold_stmt (gsi);
2143 return true;
2146 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2147 ORIG may be null if this is a 2-argument call. We don't attempt to
2148 simplify calls with more than 3 arguments.
2150 Return NULL_TREE if no simplification was possible, otherwise return the
2151 simplified form of the call as a tree. If IGNORED is true, it means that
2152 the caller does not use the returned value of the function. */
2154 static bool
2155 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2157 gimple stmt = gsi_stmt (*gsi);
2158 tree dest = gimple_call_arg (stmt, 0);
2159 tree fmt = gimple_call_arg (stmt, 1);
2160 tree orig = NULL_TREE;
2161 const char *fmt_str = NULL;
2163 /* Verify the required arguments in the original call. We deal with two
2164 types of sprintf() calls: 'sprintf (str, fmt)' and
2165 'sprintf (dest, "%s", orig)'. */
2166 if (gimple_call_num_args (stmt) > 3)
2167 return false;
2169 if (gimple_call_num_args (stmt) == 3)
2170 orig = gimple_call_arg (stmt, 2);
2172 /* Check whether the format is a literal string constant. */
2173 fmt_str = c_getstr (fmt);
2174 if (fmt_str == NULL)
2175 return false;
2177 if (!init_target_chars ())
2178 return false;
2180 /* If the format doesn't contain % args or %%, use strcpy. */
2181 if (strchr (fmt_str, target_percent) == NULL)
2183 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2185 if (!fn)
2186 return false;
2188 /* Don't optimize sprintf (buf, "abc", ptr++). */
2189 if (orig)
2190 return false;
2192 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2193 'format' is known to contain no % formats. */
2194 gimple_seq stmts = NULL;
2195 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2196 gimple_seq_add_stmt_without_update (&stmts, repl);
2197 if (gimple_call_lhs (stmt))
2199 repl = gimple_build_assign (gimple_call_lhs (stmt),
2200 build_int_cst (integer_type_node,
2201 strlen (fmt_str)));
2202 gimple_seq_add_stmt_without_update (&stmts, repl);
2203 gsi_replace_with_seq_vops (gsi, stmts);
2204 /* gsi now points at the assignment to the lhs, get a
2205 stmt iterator to the memcpy call.
2206 ??? We can't use gsi_for_stmt as that doesn't work when the
2207 CFG isn't built yet. */
2208 gimple_stmt_iterator gsi2 = *gsi;
2209 gsi_prev (&gsi2);
2210 fold_stmt (&gsi2);
2212 else
2214 gsi_replace_with_seq_vops (gsi, stmts);
2215 fold_stmt (gsi);
2217 return true;
2220 /* If the format is "%s", use strcpy if the result isn't used. */
2221 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2223 tree fn;
2224 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2226 if (!fn)
2227 return false;
2229 /* Don't crash on sprintf (str1, "%s"). */
2230 if (!orig)
2231 return false;
2233 tree len = NULL_TREE;
2234 if (gimple_call_lhs (stmt))
2236 len = c_strlen (orig, 1);
2237 if (!len)
2238 return false;
2241 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2242 gimple_seq stmts = NULL;
2243 gimple repl = gimple_build_call (fn, 2, dest, orig);
2244 gimple_seq_add_stmt_without_update (&stmts, repl);
2245 if (gimple_call_lhs (stmt))
2247 if (!useless_type_conversion_p (integer_type_node, TREE_TYPE (len)))
2248 len = fold_convert (integer_type_node, len);
2249 repl = gimple_build_assign (gimple_call_lhs (stmt), len);
2250 gimple_seq_add_stmt_without_update (&stmts, repl);
2251 gsi_replace_with_seq_vops (gsi, stmts);
2252 /* gsi now points at the assignment to the lhs, get a
2253 stmt iterator to the memcpy call.
2254 ??? We can't use gsi_for_stmt as that doesn't work when the
2255 CFG isn't built yet. */
2256 gimple_stmt_iterator gsi2 = *gsi;
2257 gsi_prev (&gsi2);
2258 fold_stmt (&gsi2);
2260 else
2262 gsi_replace_with_seq_vops (gsi, stmts);
2263 fold_stmt (gsi);
2265 return true;
2267 return false;
2273 /* Fold a call to __builtin_strlen with known length LEN. */
2275 static bool
2276 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi, tree len)
2278 if (!len)
2280 gimple stmt = gsi_stmt (*gsi);
2281 len = c_strlen (gimple_call_arg (stmt, 0), 0);
2283 if (!len)
2284 return false;
2285 replace_call_with_value (gsi, len);
2286 return true;
2290 /* Fold builtins at *GSI with knowledge about a length argument. */
2292 static bool
2293 gimple_fold_builtin_with_strlen (gimple_stmt_iterator *gsi)
2295 gimple stmt = gsi_stmt (*gsi);
2296 tree val[3];
2297 tree a;
2298 int arg_idx, type;
2299 bitmap visited;
2300 bool ignore;
2301 location_t loc = gimple_location (stmt);
2303 ignore = (gimple_call_lhs (stmt) == NULL);
2305 /* Limit the work only for builtins we know how to simplify. */
2306 tree callee = gimple_call_fndecl (stmt);
2307 switch (DECL_FUNCTION_CODE (callee))
2309 case BUILT_IN_STRLEN:
2310 case BUILT_IN_FPUTS:
2311 case BUILT_IN_FPUTS_UNLOCKED:
2312 arg_idx = 0;
2313 type = 0;
2314 break;
2315 case BUILT_IN_STRCPY:
2316 case BUILT_IN_STRNCPY:
2317 case BUILT_IN_STRCAT:
2318 arg_idx = 1;
2319 type = 0;
2320 break;
2321 case BUILT_IN_MEMCPY_CHK:
2322 case BUILT_IN_MEMPCPY_CHK:
2323 case BUILT_IN_MEMMOVE_CHK:
2324 case BUILT_IN_MEMSET_CHK:
2325 case BUILT_IN_STRNCPY_CHK:
2326 case BUILT_IN_STPNCPY_CHK:
2327 arg_idx = 2;
2328 type = 2;
2329 break;
2330 case BUILT_IN_STRCPY_CHK:
2331 case BUILT_IN_STPCPY_CHK:
2332 arg_idx = 1;
2333 type = 1;
2334 break;
2335 case BUILT_IN_SNPRINTF_CHK:
2336 case BUILT_IN_VSNPRINTF_CHK:
2337 arg_idx = 1;
2338 type = 2;
2339 break;
2340 default:
2341 return false;
2344 int nargs = gimple_call_num_args (stmt);
2345 if (arg_idx >= nargs)
2346 return false;
2348 /* Try to use the dataflow information gathered by the CCP process. */
2349 visited = BITMAP_ALLOC (NULL);
2350 bitmap_clear (visited);
2352 memset (val, 0, sizeof (val));
2353 a = gimple_call_arg (stmt, arg_idx);
2354 if (!get_maxval_strlen (a, &val[arg_idx], visited, type)
2355 || !is_gimple_val (val[arg_idx]))
2356 val[arg_idx] = NULL_TREE;
2358 BITMAP_FREE (visited);
2360 switch (DECL_FUNCTION_CODE (callee))
2362 case BUILT_IN_STRLEN:
2363 return gimple_fold_builtin_strlen (gsi, val[0]);
2365 case BUILT_IN_STRCPY:
2366 return gimple_fold_builtin_strcpy (gsi, loc,
2367 gimple_call_arg (stmt, 0),
2368 gimple_call_arg (stmt, 1),
2369 val[1]);
2371 case BUILT_IN_STRNCPY:
2372 return gimple_fold_builtin_strncpy (gsi, loc,
2373 gimple_call_arg (stmt, 0),
2374 gimple_call_arg (stmt, 1),
2375 gimple_call_arg (stmt, 2),
2376 val[1]);
2378 case BUILT_IN_STRCAT:
2379 return gimple_fold_builtin_strcat (gsi, loc, gimple_call_arg (stmt, 0),
2380 gimple_call_arg (stmt, 1),
2381 val[1]);
2383 case BUILT_IN_FPUTS:
2384 return gimple_fold_builtin_fputs (gsi, loc, gimple_call_arg (stmt, 0),
2385 gimple_call_arg (stmt, 1),
2386 ignore, false, val[0]);
2388 case BUILT_IN_FPUTS_UNLOCKED:
2389 return gimple_fold_builtin_fputs (gsi, loc, gimple_call_arg (stmt, 0),
2390 gimple_call_arg (stmt, 1),
2391 ignore, true, val[0]);
2393 case BUILT_IN_MEMCPY_CHK:
2394 case BUILT_IN_MEMPCPY_CHK:
2395 case BUILT_IN_MEMMOVE_CHK:
2396 case BUILT_IN_MEMSET_CHK:
2397 return gimple_fold_builtin_memory_chk (gsi, loc,
2398 gimple_call_arg (stmt, 0),
2399 gimple_call_arg (stmt, 1),
2400 gimple_call_arg (stmt, 2),
2401 gimple_call_arg (stmt, 3),
2402 val[2], ignore,
2403 DECL_FUNCTION_CODE (callee));
2405 case BUILT_IN_STRCPY_CHK:
2406 case BUILT_IN_STPCPY_CHK:
2407 return gimple_fold_builtin_stxcpy_chk (gsi, loc,
2408 gimple_call_arg (stmt, 0),
2409 gimple_call_arg (stmt, 1),
2410 gimple_call_arg (stmt, 2),
2411 val[1], ignore,
2412 DECL_FUNCTION_CODE (callee));
2414 case BUILT_IN_STRNCPY_CHK:
2415 case BUILT_IN_STPNCPY_CHK:
2416 return gimple_fold_builtin_stxncpy_chk (gsi,
2417 gimple_call_arg (stmt, 0),
2418 gimple_call_arg (stmt, 1),
2419 gimple_call_arg (stmt, 2),
2420 gimple_call_arg (stmt, 3),
2421 val[2], ignore,
2422 DECL_FUNCTION_CODE (callee));
2424 case BUILT_IN_SNPRINTF_CHK:
2425 case BUILT_IN_VSNPRINTF_CHK:
2426 return gimple_fold_builtin_snprintf_chk (gsi, val[1],
2427 DECL_FUNCTION_CODE (callee));
2429 default:
2430 gcc_unreachable ();
2433 return false;
2437 /* Fold the non-target builtin at *GSI and return whether any simplification
2438 was made. */
2440 static bool
2441 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2443 gimple stmt = gsi_stmt (*gsi);
2444 tree callee = gimple_call_fndecl (stmt);
2446 /* Give up for always_inline inline builtins until they are
2447 inlined. */
2448 if (avoid_folding_inline_builtin (callee))
2449 return false;
2451 if (gimple_fold_builtin_with_strlen (gsi))
2452 return true;
2454 switch (DECL_FUNCTION_CODE (callee))
2456 case BUILT_IN_BZERO:
2457 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2458 gimple_call_arg (stmt, 1));
2459 case BUILT_IN_MEMSET:
2460 return gimple_fold_builtin_memset (gsi,
2461 gimple_call_arg (stmt, 1),
2462 gimple_call_arg (stmt, 2));
2463 case BUILT_IN_BCOPY:
2464 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2465 gimple_call_arg (stmt, 0), 3);
2466 case BUILT_IN_MEMCPY:
2467 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2468 gimple_call_arg (stmt, 1), 0);
2469 case BUILT_IN_MEMPCPY:
2470 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2471 gimple_call_arg (stmt, 1), 1);
2472 case BUILT_IN_MEMMOVE:
2473 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2474 gimple_call_arg (stmt, 1), 3);
2475 case BUILT_IN_SPRINTF_CHK:
2476 case BUILT_IN_VSPRINTF_CHK:
2477 return gimple_fold_builtin_sprintf_chk (gsi, DECL_FUNCTION_CODE (callee));
2478 case BUILT_IN_SPRINTF:
2479 return gimple_fold_builtin_sprintf (gsi);
2480 default:;
2483 /* Try the generic builtin folder. */
2484 bool ignore = (gimple_call_lhs (stmt) == NULL);
2485 tree result = fold_call_stmt (stmt, ignore);
2486 if (result)
2488 if (ignore)
2489 STRIP_NOPS (result);
2490 else
2491 result = fold_convert (gimple_call_return_type (stmt), result);
2492 if (!update_call_from_tree (gsi, result))
2493 gimplify_and_update_call_from_tree (gsi, result);
2494 return true;
2497 return false;
2500 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2501 The statement may be replaced by another statement, e.g., if the call
2502 simplifies to a constant value. Return true if any changes were made.
2503 It is assumed that the operands have been previously folded. */
2505 static bool
2506 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2508 gimple stmt = gsi_stmt (*gsi);
2509 tree callee;
2510 bool changed = false;
2511 unsigned i;
2513 /* Fold *& in call arguments. */
2514 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2515 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2517 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2518 if (tmp)
2520 gimple_call_set_arg (stmt, i, tmp);
2521 changed = true;
2525 /* Check for virtual calls that became direct calls. */
2526 callee = gimple_call_fn (stmt);
2527 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
2529 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
2531 if (dump_file && virtual_method_call_p (callee)
2532 && !possible_polymorphic_call_target_p
2533 (callee, cgraph_node::get (gimple_call_addr_fndecl
2534 (OBJ_TYPE_REF_EXPR (callee)))))
2536 fprintf (dump_file,
2537 "Type inheritance inconsistent devirtualization of ");
2538 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2539 fprintf (dump_file, " to ");
2540 print_generic_expr (dump_file, callee, TDF_SLIM);
2541 fprintf (dump_file, "\n");
2544 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
2545 changed = true;
2547 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
2549 bool final;
2550 vec <cgraph_node *>targets
2551 = possible_polymorphic_call_targets (callee, stmt, &final);
2552 if (final && targets.length () <= 1 && dbg_cnt (devirt))
2554 tree lhs = gimple_call_lhs (stmt);
2555 if (dump_enabled_p ())
2557 location_t loc = gimple_location_safe (stmt);
2558 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2559 "folding virtual function call to %s\n",
2560 targets.length () == 1
2561 ? targets[0]->name ()
2562 : "__builtin_unreachable");
2564 if (targets.length () == 1)
2566 gimple_call_set_fndecl (stmt, targets[0]->decl);
2567 changed = true;
2568 /* If the call becomes noreturn, remove the lhs. */
2569 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
2571 if (TREE_CODE (lhs) == SSA_NAME)
2573 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
2574 tree def = get_or_create_ssa_default_def (cfun, var);
2575 gimple new_stmt = gimple_build_assign (lhs, def);
2576 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
2578 gimple_call_set_lhs (stmt, NULL_TREE);
2581 else
2583 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2584 gimple new_stmt = gimple_build_call (fndecl, 0);
2585 gimple_set_location (new_stmt, gimple_location (stmt));
2586 if (lhs && TREE_CODE (lhs) == SSA_NAME)
2588 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
2589 tree def = get_or_create_ssa_default_def (cfun, var);
2591 /* To satisfy condition for
2592 cgraph_update_edges_for_call_stmt_node,
2593 we need to preserve GIMPLE_CALL statement
2594 at position of GSI iterator. */
2595 update_call_from_tree (gsi, def);
2596 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
2598 else
2599 gsi_replace (gsi, new_stmt, true);
2600 return true;
2606 if (inplace)
2607 return changed;
2609 /* Check for builtins that CCP can handle using information not
2610 available in the generic fold routines. */
2611 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2613 if (gimple_fold_builtin (gsi))
2614 changed = true;
2616 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
2618 changed |= targetm.gimple_fold_builtin (gsi);
2620 else if (gimple_call_internal_p (stmt))
2622 enum tree_code subcode = ERROR_MARK;
2623 tree result = NULL_TREE;
2624 switch (gimple_call_internal_fn (stmt))
2626 case IFN_BUILTIN_EXPECT:
2627 result = fold_builtin_expect (gimple_location (stmt),
2628 gimple_call_arg (stmt, 0),
2629 gimple_call_arg (stmt, 1),
2630 gimple_call_arg (stmt, 2));
2631 break;
2632 case IFN_UBSAN_CHECK_ADD:
2633 subcode = PLUS_EXPR;
2634 break;
2635 case IFN_UBSAN_CHECK_SUB:
2636 subcode = MINUS_EXPR;
2637 break;
2638 case IFN_UBSAN_CHECK_MUL:
2639 subcode = MULT_EXPR;
2640 break;
2641 default:
2642 break;
2644 if (subcode != ERROR_MARK)
2646 tree arg0 = gimple_call_arg (stmt, 0);
2647 tree arg1 = gimple_call_arg (stmt, 1);
2648 /* x = y + 0; x = y - 0; x = y * 0; */
2649 if (integer_zerop (arg1))
2650 result = subcode == MULT_EXPR
2651 ? build_zero_cst (TREE_TYPE (arg0))
2652 : arg0;
2653 /* x = 0 + y; x = 0 * y; */
2654 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
2655 result = subcode == MULT_EXPR
2656 ? build_zero_cst (TREE_TYPE (arg0))
2657 : arg1;
2658 /* x = y - y; */
2659 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
2660 result = build_zero_cst (TREE_TYPE (arg0));
2661 /* x = y * 1; x = 1 * y; */
2662 else if (subcode == MULT_EXPR)
2664 if (integer_onep (arg1))
2665 result = arg0;
2666 else if (integer_onep (arg0))
2667 result = arg1;
2670 if (result)
2672 if (!update_call_from_tree (gsi, result))
2673 gimplify_and_update_call_from_tree (gsi, result);
2674 changed = true;
2678 return changed;
2681 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
2682 distinguishes both cases. */
2684 static bool
2685 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
2687 bool changed = false;
2688 gimple stmt = gsi_stmt (*gsi);
2689 unsigned i;
2691 /* Fold the main computation performed by the statement. */
2692 switch (gimple_code (stmt))
2694 case GIMPLE_ASSIGN:
2696 unsigned old_num_ops = gimple_num_ops (stmt);
2697 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2698 tree lhs = gimple_assign_lhs (stmt);
2699 tree new_rhs;
2700 /* First canonicalize operand order. This avoids building new
2701 trees if this is the only thing fold would later do. */
2702 if ((commutative_tree_code (subcode)
2703 || commutative_ternary_tree_code (subcode))
2704 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
2705 gimple_assign_rhs2 (stmt), false))
2707 tree tem = gimple_assign_rhs1 (stmt);
2708 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
2709 gimple_assign_set_rhs2 (stmt, tem);
2710 changed = true;
2712 new_rhs = fold_gimple_assign (gsi);
2713 if (new_rhs
2714 && !useless_type_conversion_p (TREE_TYPE (lhs),
2715 TREE_TYPE (new_rhs)))
2716 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
2717 if (new_rhs
2718 && (!inplace
2719 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
2721 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
2722 changed = true;
2724 break;
2727 case GIMPLE_COND:
2728 changed |= fold_gimple_cond (stmt);
2729 break;
2731 case GIMPLE_CALL:
2732 changed |= gimple_fold_call (gsi, inplace);
2733 break;
2735 case GIMPLE_ASM:
2736 /* Fold *& in asm operands. */
2738 size_t noutputs;
2739 const char **oconstraints;
2740 const char *constraint;
2741 bool allows_mem, allows_reg;
2743 noutputs = gimple_asm_noutputs (stmt);
2744 oconstraints = XALLOCAVEC (const char *, noutputs);
2746 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
2748 tree link = gimple_asm_output_op (stmt, i);
2749 tree op = TREE_VALUE (link);
2750 oconstraints[i]
2751 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
2752 if (REFERENCE_CLASS_P (op)
2753 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
2755 TREE_VALUE (link) = op;
2756 changed = true;
2759 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
2761 tree link = gimple_asm_input_op (stmt, i);
2762 tree op = TREE_VALUE (link);
2763 constraint
2764 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
2765 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
2766 oconstraints, &allows_mem, &allows_reg);
2767 if (REFERENCE_CLASS_P (op)
2768 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
2769 != NULL_TREE)
2771 TREE_VALUE (link) = op;
2772 changed = true;
2776 break;
2778 case GIMPLE_DEBUG:
2779 if (gimple_debug_bind_p (stmt))
2781 tree val = gimple_debug_bind_get_value (stmt);
2782 if (val
2783 && REFERENCE_CLASS_P (val))
2785 tree tem = maybe_fold_reference (val, false);
2786 if (tem)
2788 gimple_debug_bind_set_value (stmt, tem);
2789 changed = true;
2792 else if (val
2793 && TREE_CODE (val) == ADDR_EXPR)
2795 tree ref = TREE_OPERAND (val, 0);
2796 tree tem = maybe_fold_reference (ref, false);
2797 if (tem)
2799 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
2800 gimple_debug_bind_set_value (stmt, tem);
2801 changed = true;
2805 break;
2807 default:;
2810 stmt = gsi_stmt (*gsi);
2812 /* Fold *& on the lhs. */
2813 if (gimple_has_lhs (stmt))
2815 tree lhs = gimple_get_lhs (stmt);
2816 if (lhs && REFERENCE_CLASS_P (lhs))
2818 tree new_lhs = maybe_fold_reference (lhs, true);
2819 if (new_lhs)
2821 gimple_set_lhs (stmt, new_lhs);
2822 changed = true;
2827 return changed;
2830 /* Fold the statement pointed to by GSI. In some cases, this function may
2831 replace the whole statement with a new one. Returns true iff folding
2832 makes any changes.
2833 The statement pointed to by GSI should be in valid gimple form but may
2834 be in unfolded state as resulting from for example constant propagation
2835 which can produce *&x = 0. */
2837 bool
2838 fold_stmt (gimple_stmt_iterator *gsi)
2840 return fold_stmt_1 (gsi, false);
2843 /* Perform the minimal folding on statement *GSI. Only operations like
2844 *&x created by constant propagation are handled. The statement cannot
2845 be replaced with a new one. Return true if the statement was
2846 changed, false otherwise.
2847 The statement *GSI should be in valid gimple form but may
2848 be in unfolded state as resulting from for example constant propagation
2849 which can produce *&x = 0. */
2851 bool
2852 fold_stmt_inplace (gimple_stmt_iterator *gsi)
2854 gimple stmt = gsi_stmt (*gsi);
2855 bool changed = fold_stmt_1 (gsi, true);
2856 gcc_assert (gsi_stmt (*gsi) == stmt);
2857 return changed;
2860 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
2861 if EXPR is null or we don't know how.
2862 If non-null, the result always has boolean type. */
2864 static tree
2865 canonicalize_bool (tree expr, bool invert)
2867 if (!expr)
2868 return NULL_TREE;
2869 else if (invert)
2871 if (integer_nonzerop (expr))
2872 return boolean_false_node;
2873 else if (integer_zerop (expr))
2874 return boolean_true_node;
2875 else if (TREE_CODE (expr) == SSA_NAME)
2876 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
2877 build_int_cst (TREE_TYPE (expr), 0));
2878 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
2879 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
2880 boolean_type_node,
2881 TREE_OPERAND (expr, 0),
2882 TREE_OPERAND (expr, 1));
2883 else
2884 return NULL_TREE;
2886 else
2888 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
2889 return expr;
2890 if (integer_nonzerop (expr))
2891 return boolean_true_node;
2892 else if (integer_zerop (expr))
2893 return boolean_false_node;
2894 else if (TREE_CODE (expr) == SSA_NAME)
2895 return fold_build2 (NE_EXPR, boolean_type_node, expr,
2896 build_int_cst (TREE_TYPE (expr), 0));
2897 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
2898 return fold_build2 (TREE_CODE (expr),
2899 boolean_type_node,
2900 TREE_OPERAND (expr, 0),
2901 TREE_OPERAND (expr, 1));
2902 else
2903 return NULL_TREE;
2907 /* Check to see if a boolean expression EXPR is logically equivalent to the
2908 comparison (OP1 CODE OP2). Check for various identities involving
2909 SSA_NAMEs. */
2911 static bool
2912 same_bool_comparison_p (const_tree expr, enum tree_code code,
2913 const_tree op1, const_tree op2)
2915 gimple s;
2917 /* The obvious case. */
2918 if (TREE_CODE (expr) == code
2919 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
2920 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
2921 return true;
2923 /* Check for comparing (name, name != 0) and the case where expr
2924 is an SSA_NAME with a definition matching the comparison. */
2925 if (TREE_CODE (expr) == SSA_NAME
2926 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
2928 if (operand_equal_p (expr, op1, 0))
2929 return ((code == NE_EXPR && integer_zerop (op2))
2930 || (code == EQ_EXPR && integer_nonzerop (op2)));
2931 s = SSA_NAME_DEF_STMT (expr);
2932 if (is_gimple_assign (s)
2933 && gimple_assign_rhs_code (s) == code
2934 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
2935 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
2936 return true;
2939 /* If op1 is of the form (name != 0) or (name == 0), and the definition
2940 of name is a comparison, recurse. */
2941 if (TREE_CODE (op1) == SSA_NAME
2942 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
2944 s = SSA_NAME_DEF_STMT (op1);
2945 if (is_gimple_assign (s)
2946 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
2948 enum tree_code c = gimple_assign_rhs_code (s);
2949 if ((c == NE_EXPR && integer_zerop (op2))
2950 || (c == EQ_EXPR && integer_nonzerop (op2)))
2951 return same_bool_comparison_p (expr, c,
2952 gimple_assign_rhs1 (s),
2953 gimple_assign_rhs2 (s));
2954 if ((c == EQ_EXPR && integer_zerop (op2))
2955 || (c == NE_EXPR && integer_nonzerop (op2)))
2956 return same_bool_comparison_p (expr,
2957 invert_tree_comparison (c, false),
2958 gimple_assign_rhs1 (s),
2959 gimple_assign_rhs2 (s));
2962 return false;
2965 /* Check to see if two boolean expressions OP1 and OP2 are logically
2966 equivalent. */
2968 static bool
2969 same_bool_result_p (const_tree op1, const_tree op2)
2971 /* Simple cases first. */
2972 if (operand_equal_p (op1, op2, 0))
2973 return true;
2975 /* Check the cases where at least one of the operands is a comparison.
2976 These are a bit smarter than operand_equal_p in that they apply some
2977 identifies on SSA_NAMEs. */
2978 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
2979 && same_bool_comparison_p (op1, TREE_CODE (op2),
2980 TREE_OPERAND (op2, 0),
2981 TREE_OPERAND (op2, 1)))
2982 return true;
2983 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
2984 && same_bool_comparison_p (op2, TREE_CODE (op1),
2985 TREE_OPERAND (op1, 0),
2986 TREE_OPERAND (op1, 1)))
2987 return true;
2989 /* Default case. */
2990 return false;
2993 /* Forward declarations for some mutually recursive functions. */
2995 static tree
2996 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2997 enum tree_code code2, tree op2a, tree op2b);
2998 static tree
2999 and_var_with_comparison (tree var, bool invert,
3000 enum tree_code code2, tree op2a, tree op2b);
3001 static tree
3002 and_var_with_comparison_1 (gimple stmt,
3003 enum tree_code code2, tree op2a, tree op2b);
3004 static tree
3005 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3006 enum tree_code code2, tree op2a, tree op2b);
3007 static tree
3008 or_var_with_comparison (tree var, bool invert,
3009 enum tree_code code2, tree op2a, tree op2b);
3010 static tree
3011 or_var_with_comparison_1 (gimple stmt,
3012 enum tree_code code2, tree op2a, tree op2b);
3014 /* Helper function for and_comparisons_1: try to simplify the AND of the
3015 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3016 If INVERT is true, invert the value of the VAR before doing the AND.
3017 Return NULL_EXPR if we can't simplify this to a single expression. */
3019 static tree
3020 and_var_with_comparison (tree var, bool invert,
3021 enum tree_code code2, tree op2a, tree op2b)
3023 tree t;
3024 gimple stmt = SSA_NAME_DEF_STMT (var);
3026 /* We can only deal with variables whose definitions are assignments. */
3027 if (!is_gimple_assign (stmt))
3028 return NULL_TREE;
3030 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3031 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3032 Then we only have to consider the simpler non-inverted cases. */
3033 if (invert)
3034 t = or_var_with_comparison_1 (stmt,
3035 invert_tree_comparison (code2, false),
3036 op2a, op2b);
3037 else
3038 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3039 return canonicalize_bool (t, invert);
3042 /* Try to simplify the AND of the ssa variable defined by the assignment
3043 STMT with the comparison specified by (OP2A CODE2 OP2B).
3044 Return NULL_EXPR if we can't simplify this to a single expression. */
3046 static tree
3047 and_var_with_comparison_1 (gimple stmt,
3048 enum tree_code code2, tree op2a, tree op2b)
3050 tree var = gimple_assign_lhs (stmt);
3051 tree true_test_var = NULL_TREE;
3052 tree false_test_var = NULL_TREE;
3053 enum tree_code innercode = gimple_assign_rhs_code (stmt);
3055 /* Check for identities like (var AND (var == 0)) => false. */
3056 if (TREE_CODE (op2a) == SSA_NAME
3057 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
3059 if ((code2 == NE_EXPR && integer_zerop (op2b))
3060 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
3062 true_test_var = op2a;
3063 if (var == true_test_var)
3064 return var;
3066 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
3067 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
3069 false_test_var = op2a;
3070 if (var == false_test_var)
3071 return boolean_false_node;
3075 /* If the definition is a comparison, recurse on it. */
3076 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
3078 tree t = and_comparisons_1 (innercode,
3079 gimple_assign_rhs1 (stmt),
3080 gimple_assign_rhs2 (stmt),
3081 code2,
3082 op2a,
3083 op2b);
3084 if (t)
3085 return t;
3088 /* If the definition is an AND or OR expression, we may be able to
3089 simplify by reassociating. */
3090 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
3091 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
3093 tree inner1 = gimple_assign_rhs1 (stmt);
3094 tree inner2 = gimple_assign_rhs2 (stmt);
3095 gimple s;
3096 tree t;
3097 tree partial = NULL_TREE;
3098 bool is_and = (innercode == BIT_AND_EXPR);
3100 /* Check for boolean identities that don't require recursive examination
3101 of inner1/inner2:
3102 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
3103 inner1 AND (inner1 OR inner2) => inner1
3104 !inner1 AND (inner1 AND inner2) => false
3105 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
3106 Likewise for similar cases involving inner2. */
3107 if (inner1 == true_test_var)
3108 return (is_and ? var : inner1);
3109 else if (inner2 == true_test_var)
3110 return (is_and ? var : inner2);
3111 else if (inner1 == false_test_var)
3112 return (is_and
3113 ? boolean_false_node
3114 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
3115 else if (inner2 == false_test_var)
3116 return (is_and
3117 ? boolean_false_node
3118 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
3120 /* Next, redistribute/reassociate the AND across the inner tests.
3121 Compute the first partial result, (inner1 AND (op2a code op2b)) */
3122 if (TREE_CODE (inner1) == SSA_NAME
3123 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
3124 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3125 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
3126 gimple_assign_rhs1 (s),
3127 gimple_assign_rhs2 (s),
3128 code2, op2a, op2b)))
3130 /* Handle the AND case, where we are reassociating:
3131 (inner1 AND inner2) AND (op2a code2 op2b)
3132 => (t AND inner2)
3133 If the partial result t is a constant, we win. Otherwise
3134 continue on to try reassociating with the other inner test. */
3135 if (is_and)
3137 if (integer_onep (t))
3138 return inner2;
3139 else if (integer_zerop (t))
3140 return boolean_false_node;
3143 /* Handle the OR case, where we are redistributing:
3144 (inner1 OR inner2) AND (op2a code2 op2b)
3145 => (t OR (inner2 AND (op2a code2 op2b))) */
3146 else if (integer_onep (t))
3147 return boolean_true_node;
3149 /* Save partial result for later. */
3150 partial = t;
3153 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
3154 if (TREE_CODE (inner2) == SSA_NAME
3155 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
3156 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3157 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
3158 gimple_assign_rhs1 (s),
3159 gimple_assign_rhs2 (s),
3160 code2, op2a, op2b)))
3162 /* Handle the AND case, where we are reassociating:
3163 (inner1 AND inner2) AND (op2a code2 op2b)
3164 => (inner1 AND t) */
3165 if (is_and)
3167 if (integer_onep (t))
3168 return inner1;
3169 else if (integer_zerop (t))
3170 return boolean_false_node;
3171 /* If both are the same, we can apply the identity
3172 (x AND x) == x. */
3173 else if (partial && same_bool_result_p (t, partial))
3174 return t;
3177 /* Handle the OR case. where we are redistributing:
3178 (inner1 OR inner2) AND (op2a code2 op2b)
3179 => (t OR (inner1 AND (op2a code2 op2b)))
3180 => (t OR partial) */
3181 else
3183 if (integer_onep (t))
3184 return boolean_true_node;
3185 else if (partial)
3187 /* We already got a simplification for the other
3188 operand to the redistributed OR expression. The
3189 interesting case is when at least one is false.
3190 Or, if both are the same, we can apply the identity
3191 (x OR x) == x. */
3192 if (integer_zerop (partial))
3193 return t;
3194 else if (integer_zerop (t))
3195 return partial;
3196 else if (same_bool_result_p (t, partial))
3197 return t;
3202 return NULL_TREE;
3205 /* Try to simplify the AND of two comparisons defined by
3206 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
3207 If this can be done without constructing an intermediate value,
3208 return the resulting tree; otherwise NULL_TREE is returned.
3209 This function is deliberately asymmetric as it recurses on SSA_DEFs
3210 in the first comparison but not the second. */
3212 static tree
3213 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3214 enum tree_code code2, tree op2a, tree op2b)
3216 tree truth_type = truth_type_for (TREE_TYPE (op1a));
3218 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
3219 if (operand_equal_p (op1a, op2a, 0)
3220 && operand_equal_p (op1b, op2b, 0))
3222 /* Result will be either NULL_TREE, or a combined comparison. */
3223 tree t = combine_comparisons (UNKNOWN_LOCATION,
3224 TRUTH_ANDIF_EXPR, code1, code2,
3225 truth_type, op1a, op1b);
3226 if (t)
3227 return t;
3230 /* Likewise the swapped case of the above. */
3231 if (operand_equal_p (op1a, op2b, 0)
3232 && operand_equal_p (op1b, op2a, 0))
3234 /* Result will be either NULL_TREE, or a combined comparison. */
3235 tree t = combine_comparisons (UNKNOWN_LOCATION,
3236 TRUTH_ANDIF_EXPR, code1,
3237 swap_tree_comparison (code2),
3238 truth_type, op1a, op1b);
3239 if (t)
3240 return t;
3243 /* If both comparisons are of the same value against constants, we might
3244 be able to merge them. */
3245 if (operand_equal_p (op1a, op2a, 0)
3246 && TREE_CODE (op1b) == INTEGER_CST
3247 && TREE_CODE (op2b) == INTEGER_CST)
3249 int cmp = tree_int_cst_compare (op1b, op2b);
3251 /* If we have (op1a == op1b), we should either be able to
3252 return that or FALSE, depending on whether the constant op1b
3253 also satisfies the other comparison against op2b. */
3254 if (code1 == EQ_EXPR)
3256 bool done = true;
3257 bool val;
3258 switch (code2)
3260 case EQ_EXPR: val = (cmp == 0); break;
3261 case NE_EXPR: val = (cmp != 0); break;
3262 case LT_EXPR: val = (cmp < 0); break;
3263 case GT_EXPR: val = (cmp > 0); break;
3264 case LE_EXPR: val = (cmp <= 0); break;
3265 case GE_EXPR: val = (cmp >= 0); break;
3266 default: done = false;
3268 if (done)
3270 if (val)
3271 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3272 else
3273 return boolean_false_node;
3276 /* Likewise if the second comparison is an == comparison. */
3277 else if (code2 == EQ_EXPR)
3279 bool done = true;
3280 bool val;
3281 switch (code1)
3283 case EQ_EXPR: val = (cmp == 0); break;
3284 case NE_EXPR: val = (cmp != 0); break;
3285 case LT_EXPR: val = (cmp > 0); break;
3286 case GT_EXPR: val = (cmp < 0); break;
3287 case LE_EXPR: val = (cmp >= 0); break;
3288 case GE_EXPR: val = (cmp <= 0); break;
3289 default: done = false;
3291 if (done)
3293 if (val)
3294 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3295 else
3296 return boolean_false_node;
3300 /* Same business with inequality tests. */
3301 else if (code1 == NE_EXPR)
3303 bool val;
3304 switch (code2)
3306 case EQ_EXPR: val = (cmp != 0); break;
3307 case NE_EXPR: val = (cmp == 0); break;
3308 case LT_EXPR: val = (cmp >= 0); break;
3309 case GT_EXPR: val = (cmp <= 0); break;
3310 case LE_EXPR: val = (cmp > 0); break;
3311 case GE_EXPR: val = (cmp < 0); break;
3312 default:
3313 val = false;
3315 if (val)
3316 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3318 else if (code2 == NE_EXPR)
3320 bool val;
3321 switch (code1)
3323 case EQ_EXPR: val = (cmp == 0); break;
3324 case NE_EXPR: val = (cmp != 0); break;
3325 case LT_EXPR: val = (cmp <= 0); break;
3326 case GT_EXPR: val = (cmp >= 0); break;
3327 case LE_EXPR: val = (cmp < 0); break;
3328 case GE_EXPR: val = (cmp > 0); break;
3329 default:
3330 val = false;
3332 if (val)
3333 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3336 /* Chose the more restrictive of two < or <= comparisons. */
3337 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
3338 && (code2 == LT_EXPR || code2 == LE_EXPR))
3340 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
3341 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3342 else
3343 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3346 /* Likewise chose the more restrictive of two > or >= comparisons. */
3347 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
3348 && (code2 == GT_EXPR || code2 == GE_EXPR))
3350 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
3351 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3352 else
3353 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3356 /* Check for singleton ranges. */
3357 else if (cmp == 0
3358 && ((code1 == LE_EXPR && code2 == GE_EXPR)
3359 || (code1 == GE_EXPR && code2 == LE_EXPR)))
3360 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
3362 /* Check for disjoint ranges. */
3363 else if (cmp <= 0
3364 && (code1 == LT_EXPR || code1 == LE_EXPR)
3365 && (code2 == GT_EXPR || code2 == GE_EXPR))
3366 return boolean_false_node;
3367 else if (cmp >= 0
3368 && (code1 == GT_EXPR || code1 == GE_EXPR)
3369 && (code2 == LT_EXPR || code2 == LE_EXPR))
3370 return boolean_false_node;
3373 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
3374 NAME's definition is a truth value. See if there are any simplifications
3375 that can be done against the NAME's definition. */
3376 if (TREE_CODE (op1a) == SSA_NAME
3377 && (code1 == NE_EXPR || code1 == EQ_EXPR)
3378 && (integer_zerop (op1b) || integer_onep (op1b)))
3380 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
3381 || (code1 == NE_EXPR && integer_onep (op1b)));
3382 gimple stmt = SSA_NAME_DEF_STMT (op1a);
3383 switch (gimple_code (stmt))
3385 case GIMPLE_ASSIGN:
3386 /* Try to simplify by copy-propagating the definition. */
3387 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
3389 case GIMPLE_PHI:
3390 /* If every argument to the PHI produces the same result when
3391 ANDed with the second comparison, we win.
3392 Do not do this unless the type is bool since we need a bool
3393 result here anyway. */
3394 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
3396 tree result = NULL_TREE;
3397 unsigned i;
3398 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3400 tree arg = gimple_phi_arg_def (stmt, i);
3402 /* If this PHI has itself as an argument, ignore it.
3403 If all the other args produce the same result,
3404 we're still OK. */
3405 if (arg == gimple_phi_result (stmt))
3406 continue;
3407 else if (TREE_CODE (arg) == INTEGER_CST)
3409 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
3411 if (!result)
3412 result = boolean_false_node;
3413 else if (!integer_zerop (result))
3414 return NULL_TREE;
3416 else if (!result)
3417 result = fold_build2 (code2, boolean_type_node,
3418 op2a, op2b);
3419 else if (!same_bool_comparison_p (result,
3420 code2, op2a, op2b))
3421 return NULL_TREE;
3423 else if (TREE_CODE (arg) == SSA_NAME
3424 && !SSA_NAME_IS_DEFAULT_DEF (arg))
3426 tree temp;
3427 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
3428 /* In simple cases we can look through PHI nodes,
3429 but we have to be careful with loops.
3430 See PR49073. */
3431 if (! dom_info_available_p (CDI_DOMINATORS)
3432 || gimple_bb (def_stmt) == gimple_bb (stmt)
3433 || dominated_by_p (CDI_DOMINATORS,
3434 gimple_bb (def_stmt),
3435 gimple_bb (stmt)))
3436 return NULL_TREE;
3437 temp = and_var_with_comparison (arg, invert, code2,
3438 op2a, op2b);
3439 if (!temp)
3440 return NULL_TREE;
3441 else if (!result)
3442 result = temp;
3443 else if (!same_bool_result_p (result, temp))
3444 return NULL_TREE;
3446 else
3447 return NULL_TREE;
3449 return result;
3452 default:
3453 break;
3456 return NULL_TREE;
3459 /* Try to simplify the AND of two comparisons, specified by
3460 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
3461 If this can be simplified to a single expression (without requiring
3462 introducing more SSA variables to hold intermediate values),
3463 return the resulting tree. Otherwise return NULL_TREE.
3464 If the result expression is non-null, it has boolean type. */
3466 tree
3467 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
3468 enum tree_code code2, tree op2a, tree op2b)
3470 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
3471 if (t)
3472 return t;
3473 else
3474 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
3477 /* Helper function for or_comparisons_1: try to simplify the OR of the
3478 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3479 If INVERT is true, invert the value of VAR before doing the OR.
3480 Return NULL_EXPR if we can't simplify this to a single expression. */
3482 static tree
3483 or_var_with_comparison (tree var, bool invert,
3484 enum tree_code code2, tree op2a, tree op2b)
3486 tree t;
3487 gimple stmt = SSA_NAME_DEF_STMT (var);
3489 /* We can only deal with variables whose definitions are assignments. */
3490 if (!is_gimple_assign (stmt))
3491 return NULL_TREE;
3493 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3494 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
3495 Then we only have to consider the simpler non-inverted cases. */
3496 if (invert)
3497 t = and_var_with_comparison_1 (stmt,
3498 invert_tree_comparison (code2, false),
3499 op2a, op2b);
3500 else
3501 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
3502 return canonicalize_bool (t, invert);
3505 /* Try to simplify the OR of the ssa variable defined by the assignment
3506 STMT with the comparison specified by (OP2A CODE2 OP2B).
3507 Return NULL_EXPR if we can't simplify this to a single expression. */
3509 static tree
3510 or_var_with_comparison_1 (gimple stmt,
3511 enum tree_code code2, tree op2a, tree op2b)
3513 tree var = gimple_assign_lhs (stmt);
3514 tree true_test_var = NULL_TREE;
3515 tree false_test_var = NULL_TREE;
3516 enum tree_code innercode = gimple_assign_rhs_code (stmt);
3518 /* Check for identities like (var OR (var != 0)) => true . */
3519 if (TREE_CODE (op2a) == SSA_NAME
3520 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
3522 if ((code2 == NE_EXPR && integer_zerop (op2b))
3523 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
3525 true_test_var = op2a;
3526 if (var == true_test_var)
3527 return var;
3529 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
3530 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
3532 false_test_var = op2a;
3533 if (var == false_test_var)
3534 return boolean_true_node;
3538 /* If the definition is a comparison, recurse on it. */
3539 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
3541 tree t = or_comparisons_1 (innercode,
3542 gimple_assign_rhs1 (stmt),
3543 gimple_assign_rhs2 (stmt),
3544 code2,
3545 op2a,
3546 op2b);
3547 if (t)
3548 return t;
3551 /* If the definition is an AND or OR expression, we may be able to
3552 simplify by reassociating. */
3553 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
3554 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
3556 tree inner1 = gimple_assign_rhs1 (stmt);
3557 tree inner2 = gimple_assign_rhs2 (stmt);
3558 gimple s;
3559 tree t;
3560 tree partial = NULL_TREE;
3561 bool is_or = (innercode == BIT_IOR_EXPR);
3563 /* Check for boolean identities that don't require recursive examination
3564 of inner1/inner2:
3565 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
3566 inner1 OR (inner1 AND inner2) => inner1
3567 !inner1 OR (inner1 OR inner2) => true
3568 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
3570 if (inner1 == true_test_var)
3571 return (is_or ? var : inner1);
3572 else if (inner2 == true_test_var)
3573 return (is_or ? var : inner2);
3574 else if (inner1 == false_test_var)
3575 return (is_or
3576 ? boolean_true_node
3577 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
3578 else if (inner2 == false_test_var)
3579 return (is_or
3580 ? boolean_true_node
3581 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
3583 /* Next, redistribute/reassociate the OR across the inner tests.
3584 Compute the first partial result, (inner1 OR (op2a code op2b)) */
3585 if (TREE_CODE (inner1) == SSA_NAME
3586 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
3587 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3588 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
3589 gimple_assign_rhs1 (s),
3590 gimple_assign_rhs2 (s),
3591 code2, op2a, op2b)))
3593 /* Handle the OR case, where we are reassociating:
3594 (inner1 OR inner2) OR (op2a code2 op2b)
3595 => (t OR inner2)
3596 If the partial result t is a constant, we win. Otherwise
3597 continue on to try reassociating with the other inner test. */
3598 if (is_or)
3600 if (integer_onep (t))
3601 return boolean_true_node;
3602 else if (integer_zerop (t))
3603 return inner2;
3606 /* Handle the AND case, where we are redistributing:
3607 (inner1 AND inner2) OR (op2a code2 op2b)
3608 => (t AND (inner2 OR (op2a code op2b))) */
3609 else if (integer_zerop (t))
3610 return boolean_false_node;
3612 /* Save partial result for later. */
3613 partial = t;
3616 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
3617 if (TREE_CODE (inner2) == SSA_NAME
3618 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
3619 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3620 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
3621 gimple_assign_rhs1 (s),
3622 gimple_assign_rhs2 (s),
3623 code2, op2a, op2b)))
3625 /* Handle the OR case, where we are reassociating:
3626 (inner1 OR inner2) OR (op2a code2 op2b)
3627 => (inner1 OR t)
3628 => (t OR partial) */
3629 if (is_or)
3631 if (integer_zerop (t))
3632 return inner1;
3633 else if (integer_onep (t))
3634 return boolean_true_node;
3635 /* If both are the same, we can apply the identity
3636 (x OR x) == x. */
3637 else if (partial && same_bool_result_p (t, partial))
3638 return t;
3641 /* Handle the AND case, where we are redistributing:
3642 (inner1 AND inner2) OR (op2a code2 op2b)
3643 => (t AND (inner1 OR (op2a code2 op2b)))
3644 => (t AND partial) */
3645 else
3647 if (integer_zerop (t))
3648 return boolean_false_node;
3649 else if (partial)
3651 /* We already got a simplification for the other
3652 operand to the redistributed AND expression. The
3653 interesting case is when at least one is true.
3654 Or, if both are the same, we can apply the identity
3655 (x AND x) == x. */
3656 if (integer_onep (partial))
3657 return t;
3658 else if (integer_onep (t))
3659 return partial;
3660 else if (same_bool_result_p (t, partial))
3661 return t;
3666 return NULL_TREE;
3669 /* Try to simplify the OR of two comparisons defined by
3670 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
3671 If this can be done without constructing an intermediate value,
3672 return the resulting tree; otherwise NULL_TREE is returned.
3673 This function is deliberately asymmetric as it recurses on SSA_DEFs
3674 in the first comparison but not the second. */
3676 static tree
3677 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3678 enum tree_code code2, tree op2a, tree op2b)
3680 tree truth_type = truth_type_for (TREE_TYPE (op1a));
3682 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
3683 if (operand_equal_p (op1a, op2a, 0)
3684 && operand_equal_p (op1b, op2b, 0))
3686 /* Result will be either NULL_TREE, or a combined comparison. */
3687 tree t = combine_comparisons (UNKNOWN_LOCATION,
3688 TRUTH_ORIF_EXPR, code1, code2,
3689 truth_type, op1a, op1b);
3690 if (t)
3691 return t;
3694 /* Likewise the swapped case of the above. */
3695 if (operand_equal_p (op1a, op2b, 0)
3696 && operand_equal_p (op1b, op2a, 0))
3698 /* Result will be either NULL_TREE, or a combined comparison. */
3699 tree t = combine_comparisons (UNKNOWN_LOCATION,
3700 TRUTH_ORIF_EXPR, code1,
3701 swap_tree_comparison (code2),
3702 truth_type, op1a, op1b);
3703 if (t)
3704 return t;
3707 /* If both comparisons are of the same value against constants, we might
3708 be able to merge them. */
3709 if (operand_equal_p (op1a, op2a, 0)
3710 && TREE_CODE (op1b) == INTEGER_CST
3711 && TREE_CODE (op2b) == INTEGER_CST)
3713 int cmp = tree_int_cst_compare (op1b, op2b);
3715 /* If we have (op1a != op1b), we should either be able to
3716 return that or TRUE, depending on whether the constant op1b
3717 also satisfies the other comparison against op2b. */
3718 if (code1 == NE_EXPR)
3720 bool done = true;
3721 bool val;
3722 switch (code2)
3724 case EQ_EXPR: val = (cmp == 0); break;
3725 case NE_EXPR: val = (cmp != 0); break;
3726 case LT_EXPR: val = (cmp < 0); break;
3727 case GT_EXPR: val = (cmp > 0); break;
3728 case LE_EXPR: val = (cmp <= 0); break;
3729 case GE_EXPR: val = (cmp >= 0); break;
3730 default: done = false;
3732 if (done)
3734 if (val)
3735 return boolean_true_node;
3736 else
3737 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3740 /* Likewise if the second comparison is a != comparison. */
3741 else if (code2 == NE_EXPR)
3743 bool done = true;
3744 bool val;
3745 switch (code1)
3747 case EQ_EXPR: val = (cmp == 0); break;
3748 case NE_EXPR: val = (cmp != 0); break;
3749 case LT_EXPR: val = (cmp > 0); break;
3750 case GT_EXPR: val = (cmp < 0); break;
3751 case LE_EXPR: val = (cmp >= 0); break;
3752 case GE_EXPR: val = (cmp <= 0); break;
3753 default: done = false;
3755 if (done)
3757 if (val)
3758 return boolean_true_node;
3759 else
3760 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3764 /* See if an equality test is redundant with the other comparison. */
3765 else if (code1 == EQ_EXPR)
3767 bool val;
3768 switch (code2)
3770 case EQ_EXPR: val = (cmp == 0); break;
3771 case NE_EXPR: val = (cmp != 0); break;
3772 case LT_EXPR: val = (cmp < 0); break;
3773 case GT_EXPR: val = (cmp > 0); break;
3774 case LE_EXPR: val = (cmp <= 0); break;
3775 case GE_EXPR: val = (cmp >= 0); break;
3776 default:
3777 val = false;
3779 if (val)
3780 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3782 else if (code2 == EQ_EXPR)
3784 bool val;
3785 switch (code1)
3787 case EQ_EXPR: val = (cmp == 0); break;
3788 case NE_EXPR: val = (cmp != 0); break;
3789 case LT_EXPR: val = (cmp > 0); break;
3790 case GT_EXPR: val = (cmp < 0); break;
3791 case LE_EXPR: val = (cmp >= 0); break;
3792 case GE_EXPR: val = (cmp <= 0); break;
3793 default:
3794 val = false;
3796 if (val)
3797 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3800 /* Chose the less restrictive of two < or <= comparisons. */
3801 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
3802 && (code2 == LT_EXPR || code2 == LE_EXPR))
3804 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
3805 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3806 else
3807 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3810 /* Likewise chose the less restrictive of two > or >= comparisons. */
3811 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
3812 && (code2 == GT_EXPR || code2 == GE_EXPR))
3814 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
3815 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3816 else
3817 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3820 /* Check for singleton ranges. */
3821 else if (cmp == 0
3822 && ((code1 == LT_EXPR && code2 == GT_EXPR)
3823 || (code1 == GT_EXPR && code2 == LT_EXPR)))
3824 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
3826 /* Check for less/greater pairs that don't restrict the range at all. */
3827 else if (cmp >= 0
3828 && (code1 == LT_EXPR || code1 == LE_EXPR)
3829 && (code2 == GT_EXPR || code2 == GE_EXPR))
3830 return boolean_true_node;
3831 else if (cmp <= 0
3832 && (code1 == GT_EXPR || code1 == GE_EXPR)
3833 && (code2 == LT_EXPR || code2 == LE_EXPR))
3834 return boolean_true_node;
3837 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
3838 NAME's definition is a truth value. See if there are any simplifications
3839 that can be done against the NAME's definition. */
3840 if (TREE_CODE (op1a) == SSA_NAME
3841 && (code1 == NE_EXPR || code1 == EQ_EXPR)
3842 && (integer_zerop (op1b) || integer_onep (op1b)))
3844 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
3845 || (code1 == NE_EXPR && integer_onep (op1b)));
3846 gimple stmt = SSA_NAME_DEF_STMT (op1a);
3847 switch (gimple_code (stmt))
3849 case GIMPLE_ASSIGN:
3850 /* Try to simplify by copy-propagating the definition. */
3851 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
3853 case GIMPLE_PHI:
3854 /* If every argument to the PHI produces the same result when
3855 ORed with the second comparison, we win.
3856 Do not do this unless the type is bool since we need a bool
3857 result here anyway. */
3858 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
3860 tree result = NULL_TREE;
3861 unsigned i;
3862 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3864 tree arg = gimple_phi_arg_def (stmt, i);
3866 /* If this PHI has itself as an argument, ignore it.
3867 If all the other args produce the same result,
3868 we're still OK. */
3869 if (arg == gimple_phi_result (stmt))
3870 continue;
3871 else if (TREE_CODE (arg) == INTEGER_CST)
3873 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
3875 if (!result)
3876 result = boolean_true_node;
3877 else if (!integer_onep (result))
3878 return NULL_TREE;
3880 else if (!result)
3881 result = fold_build2 (code2, boolean_type_node,
3882 op2a, op2b);
3883 else if (!same_bool_comparison_p (result,
3884 code2, op2a, op2b))
3885 return NULL_TREE;
3887 else if (TREE_CODE (arg) == SSA_NAME
3888 && !SSA_NAME_IS_DEFAULT_DEF (arg))
3890 tree temp;
3891 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
3892 /* In simple cases we can look through PHI nodes,
3893 but we have to be careful with loops.
3894 See PR49073. */
3895 if (! dom_info_available_p (CDI_DOMINATORS)
3896 || gimple_bb (def_stmt) == gimple_bb (stmt)
3897 || dominated_by_p (CDI_DOMINATORS,
3898 gimple_bb (def_stmt),
3899 gimple_bb (stmt)))
3900 return NULL_TREE;
3901 temp = or_var_with_comparison (arg, invert, code2,
3902 op2a, op2b);
3903 if (!temp)
3904 return NULL_TREE;
3905 else if (!result)
3906 result = temp;
3907 else if (!same_bool_result_p (result, temp))
3908 return NULL_TREE;
3910 else
3911 return NULL_TREE;
3913 return result;
3916 default:
3917 break;
3920 return NULL_TREE;
3923 /* Try to simplify the OR of two comparisons, specified by
3924 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
3925 If this can be simplified to a single expression (without requiring
3926 introducing more SSA variables to hold intermediate values),
3927 return the resulting tree. Otherwise return NULL_TREE.
3928 If the result expression is non-null, it has boolean type. */
3930 tree
3931 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
3932 enum tree_code code2, tree op2a, tree op2b)
3934 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
3935 if (t)
3936 return t;
3937 else
3938 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
3942 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
3944 Either NULL_TREE, a simplified but non-constant or a constant
3945 is returned.
3947 ??? This should go into a gimple-fold-inline.h file to be eventually
3948 privatized with the single valueize function used in the various TUs
3949 to avoid the indirect function call overhead. */
3951 tree
3952 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
3954 location_t loc = gimple_location (stmt);
3955 switch (gimple_code (stmt))
3957 case GIMPLE_ASSIGN:
3959 enum tree_code subcode = gimple_assign_rhs_code (stmt);
3961 switch (get_gimple_rhs_class (subcode))
3963 case GIMPLE_SINGLE_RHS:
3965 tree rhs = gimple_assign_rhs1 (stmt);
3966 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
3968 if (TREE_CODE (rhs) == SSA_NAME)
3970 /* If the RHS is an SSA_NAME, return its known constant value,
3971 if any. */
3972 return (*valueize) (rhs);
3974 /* Handle propagating invariant addresses into address
3975 operations. */
3976 else if (TREE_CODE (rhs) == ADDR_EXPR
3977 && !is_gimple_min_invariant (rhs))
3979 HOST_WIDE_INT offset = 0;
3980 tree base;
3981 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
3982 &offset,
3983 valueize);
3984 if (base
3985 && (CONSTANT_CLASS_P (base)
3986 || decl_address_invariant_p (base)))
3987 return build_invariant_address (TREE_TYPE (rhs),
3988 base, offset);
3990 else if (TREE_CODE (rhs) == CONSTRUCTOR
3991 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
3992 && (CONSTRUCTOR_NELTS (rhs)
3993 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
3995 unsigned i;
3996 tree val, *vec;
3998 vec = XALLOCAVEC (tree,
3999 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
4000 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4002 val = (*valueize) (val);
4003 if (TREE_CODE (val) == INTEGER_CST
4004 || TREE_CODE (val) == REAL_CST
4005 || TREE_CODE (val) == FIXED_CST)
4006 vec[i] = val;
4007 else
4008 return NULL_TREE;
4011 return build_vector (TREE_TYPE (rhs), vec);
4013 if (subcode == OBJ_TYPE_REF)
4015 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
4016 /* If callee is constant, we can fold away the wrapper. */
4017 if (is_gimple_min_invariant (val))
4018 return val;
4021 if (kind == tcc_reference)
4023 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
4024 || TREE_CODE (rhs) == REALPART_EXPR
4025 || TREE_CODE (rhs) == IMAGPART_EXPR)
4026 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4028 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4029 return fold_unary_loc (EXPR_LOCATION (rhs),
4030 TREE_CODE (rhs),
4031 TREE_TYPE (rhs), val);
4033 else if (TREE_CODE (rhs) == BIT_FIELD_REF
4034 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4036 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4037 return fold_ternary_loc (EXPR_LOCATION (rhs),
4038 TREE_CODE (rhs),
4039 TREE_TYPE (rhs), val,
4040 TREE_OPERAND (rhs, 1),
4041 TREE_OPERAND (rhs, 2));
4043 else if (TREE_CODE (rhs) == MEM_REF
4044 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4046 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4047 if (TREE_CODE (val) == ADDR_EXPR
4048 && is_gimple_min_invariant (val))
4050 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
4051 unshare_expr (val),
4052 TREE_OPERAND (rhs, 1));
4053 if (tem)
4054 rhs = tem;
4057 return fold_const_aggregate_ref_1 (rhs, valueize);
4059 else if (kind == tcc_declaration)
4060 return get_symbol_constant_value (rhs);
4061 return rhs;
4064 case GIMPLE_UNARY_RHS:
4066 /* Handle unary operators that can appear in GIMPLE form.
4067 Note that we know the single operand must be a constant,
4068 so this should almost always return a simplified RHS. */
4069 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4071 return
4072 fold_unary_ignore_overflow_loc (loc, subcode,
4073 gimple_expr_type (stmt), op0);
4076 case GIMPLE_BINARY_RHS:
4078 /* Handle binary operators that can appear in GIMPLE form. */
4079 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4080 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4082 /* Translate &x + CST into an invariant form suitable for
4083 further propagation. */
4084 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
4085 && TREE_CODE (op0) == ADDR_EXPR
4086 && TREE_CODE (op1) == INTEGER_CST)
4088 tree off = fold_convert (ptr_type_node, op1);
4089 return build_fold_addr_expr_loc
4090 (loc,
4091 fold_build2 (MEM_REF,
4092 TREE_TYPE (TREE_TYPE (op0)),
4093 unshare_expr (op0), off));
4096 return fold_binary_loc (loc, subcode,
4097 gimple_expr_type (stmt), op0, op1);
4100 case GIMPLE_TERNARY_RHS:
4102 /* Handle ternary operators that can appear in GIMPLE form. */
4103 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4104 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4105 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
4107 /* Fold embedded expressions in ternary codes. */
4108 if ((subcode == COND_EXPR
4109 || subcode == VEC_COND_EXPR)
4110 && COMPARISON_CLASS_P (op0))
4112 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
4113 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
4114 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
4115 TREE_TYPE (op0), op00, op01);
4116 if (tem)
4117 op0 = tem;
4120 return fold_ternary_loc (loc, subcode,
4121 gimple_expr_type (stmt), op0, op1, op2);
4124 default:
4125 gcc_unreachable ();
4129 case GIMPLE_CALL:
4131 tree fn;
4133 if (gimple_call_internal_p (stmt))
4135 enum tree_code subcode = ERROR_MARK;
4136 switch (gimple_call_internal_fn (stmt))
4138 case IFN_UBSAN_CHECK_ADD:
4139 subcode = PLUS_EXPR;
4140 break;
4141 case IFN_UBSAN_CHECK_SUB:
4142 subcode = MINUS_EXPR;
4143 break;
4144 case IFN_UBSAN_CHECK_MUL:
4145 subcode = MULT_EXPR;
4146 break;
4147 default:
4148 return NULL_TREE;
4150 tree arg0 = gimple_call_arg (stmt, 0);
4151 tree arg1 = gimple_call_arg (stmt, 1);
4152 tree op0 = (*valueize) (arg0);
4153 tree op1 = (*valueize) (arg1);
4155 if (TREE_CODE (op0) != INTEGER_CST
4156 || TREE_CODE (op1) != INTEGER_CST)
4158 switch (subcode)
4160 case MULT_EXPR:
4161 /* x * 0 = 0 * x = 0 without overflow. */
4162 if (integer_zerop (op0) || integer_zerop (op1))
4163 return build_zero_cst (TREE_TYPE (arg0));
4164 break;
4165 case MINUS_EXPR:
4166 /* y - y = 0 without overflow. */
4167 if (operand_equal_p (op0, op1, 0))
4168 return build_zero_cst (TREE_TYPE (arg0));
4169 break;
4170 default:
4171 break;
4174 tree res
4175 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
4176 if (res
4177 && TREE_CODE (res) == INTEGER_CST
4178 && !TREE_OVERFLOW (res))
4179 return res;
4180 return NULL_TREE;
4183 fn = (*valueize) (gimple_call_fn (stmt));
4184 if (TREE_CODE (fn) == ADDR_EXPR
4185 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
4186 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
4187 && gimple_builtin_call_types_compatible_p (stmt,
4188 TREE_OPERAND (fn, 0)))
4190 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
4191 tree call, retval;
4192 unsigned i;
4193 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4194 args[i] = (*valueize) (gimple_call_arg (stmt, i));
4195 call = build_call_array_loc (loc,
4196 gimple_call_return_type (stmt),
4197 fn, gimple_call_num_args (stmt), args);
4198 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
4199 if (retval)
4201 /* fold_call_expr wraps the result inside a NOP_EXPR. */
4202 STRIP_NOPS (retval);
4203 retval = fold_convert (gimple_call_return_type (stmt), retval);
4205 return retval;
4207 return NULL_TREE;
4210 default:
4211 return NULL_TREE;
4215 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4216 Returns NULL_TREE if folding to a constant is not possible, otherwise
4217 returns a constant according to is_gimple_min_invariant. */
4219 tree
4220 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
4222 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
4223 if (res && is_gimple_min_invariant (res))
4224 return res;
4225 return NULL_TREE;
4229 /* The following set of functions are supposed to fold references using
4230 their constant initializers. */
4232 static tree fold_ctor_reference (tree type, tree ctor,
4233 unsigned HOST_WIDE_INT offset,
4234 unsigned HOST_WIDE_INT size, tree);
4236 /* See if we can find constructor defining value of BASE.
4237 When we know the consructor with constant offset (such as
4238 base is array[40] and we do know constructor of array), then
4239 BIT_OFFSET is adjusted accordingly.
4241 As a special case, return error_mark_node when constructor
4242 is not explicitly available, but it is known to be zero
4243 such as 'static const int a;'. */
4244 static tree
4245 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
4246 tree (*valueize)(tree))
4248 HOST_WIDE_INT bit_offset2, size, max_size;
4249 if (TREE_CODE (base) == MEM_REF)
4251 if (!integer_zerop (TREE_OPERAND (base, 1)))
4253 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
4254 return NULL_TREE;
4255 *bit_offset += (mem_ref_offset (base).to_short_addr ()
4256 * BITS_PER_UNIT);
4259 if (valueize
4260 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
4261 base = valueize (TREE_OPERAND (base, 0));
4262 if (!base || TREE_CODE (base) != ADDR_EXPR)
4263 return NULL_TREE;
4264 base = TREE_OPERAND (base, 0);
4267 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
4268 DECL_INITIAL. If BASE is a nested reference into another
4269 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
4270 the inner reference. */
4271 switch (TREE_CODE (base))
4273 case VAR_DECL:
4274 case CONST_DECL:
4276 tree init = ctor_for_folding (base);
4278 /* Our semantic is exact opposite of ctor_for_folding;
4279 NULL means unknown, while error_mark_node is 0. */
4280 if (init == error_mark_node)
4281 return NULL_TREE;
4282 if (!init)
4283 return error_mark_node;
4284 return init;
4287 case ARRAY_REF:
4288 case COMPONENT_REF:
4289 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
4290 if (max_size == -1 || size != max_size)
4291 return NULL_TREE;
4292 *bit_offset += bit_offset2;
4293 return get_base_constructor (base, bit_offset, valueize);
4295 case STRING_CST:
4296 case CONSTRUCTOR:
4297 return base;
4299 default:
4300 return NULL_TREE;
4304 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
4305 SIZE to the memory at bit OFFSET. */
4307 static tree
4308 fold_array_ctor_reference (tree type, tree ctor,
4309 unsigned HOST_WIDE_INT offset,
4310 unsigned HOST_WIDE_INT size,
4311 tree from_decl)
4313 unsigned HOST_WIDE_INT cnt;
4314 tree cfield, cval;
4315 offset_int low_bound;
4316 offset_int elt_size;
4317 offset_int index, max_index;
4318 offset_int access_index;
4319 tree domain_type = NULL_TREE, index_type = NULL_TREE;
4320 HOST_WIDE_INT inner_offset;
4322 /* Compute low bound and elt size. */
4323 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
4324 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
4325 if (domain_type && TYPE_MIN_VALUE (domain_type))
4327 /* Static constructors for variably sized objects makes no sense. */
4328 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
4329 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
4330 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
4332 else
4333 low_bound = 0;
4334 /* Static constructors for variably sized objects makes no sense. */
4335 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
4336 == INTEGER_CST);
4337 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
4339 /* We can handle only constantly sized accesses that are known to not
4340 be larger than size of array element. */
4341 if (!TYPE_SIZE_UNIT (type)
4342 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
4343 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
4344 || elt_size == 0)
4345 return NULL_TREE;
4347 /* Compute the array index we look for. */
4348 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
4349 elt_size);
4350 access_index += low_bound;
4351 if (index_type)
4352 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
4353 TYPE_SIGN (index_type));
4355 /* And offset within the access. */
4356 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
4358 /* See if the array field is large enough to span whole access. We do not
4359 care to fold accesses spanning multiple array indexes. */
4360 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
4361 return NULL_TREE;
4363 index = low_bound - 1;
4364 if (index_type)
4365 index = wi::ext (index, TYPE_PRECISION (index_type),
4366 TYPE_SIGN (index_type));
4368 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
4370 /* Array constructor might explicitely set index, or specify range
4371 or leave index NULL meaning that it is next index after previous
4372 one. */
4373 if (cfield)
4375 if (TREE_CODE (cfield) == INTEGER_CST)
4376 max_index = index = wi::to_offset (cfield);
4377 else
4379 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
4380 index = wi::to_offset (TREE_OPERAND (cfield, 0));
4381 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
4384 else
4386 index += 1;
4387 if (index_type)
4388 index = wi::ext (index, TYPE_PRECISION (index_type),
4389 TYPE_SIGN (index_type));
4390 max_index = index;
4393 /* Do we have match? */
4394 if (wi::cmpu (access_index, index) >= 0
4395 && wi::cmpu (access_index, max_index) <= 0)
4396 return fold_ctor_reference (type, cval, inner_offset, size,
4397 from_decl);
4399 /* When memory is not explicitely mentioned in constructor,
4400 it is 0 (or out of range). */
4401 return build_zero_cst (type);
4404 /* CTOR is CONSTRUCTOR of an aggregate or vector.
4405 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
4407 static tree
4408 fold_nonarray_ctor_reference (tree type, tree ctor,
4409 unsigned HOST_WIDE_INT offset,
4410 unsigned HOST_WIDE_INT size,
4411 tree from_decl)
4413 unsigned HOST_WIDE_INT cnt;
4414 tree cfield, cval;
4416 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
4417 cval)
4419 tree byte_offset = DECL_FIELD_OFFSET (cfield);
4420 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
4421 tree field_size = DECL_SIZE (cfield);
4422 offset_int bitoffset;
4423 offset_int bitoffset_end, access_end;
4425 /* Variable sized objects in static constructors makes no sense,
4426 but field_size can be NULL for flexible array members. */
4427 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
4428 && TREE_CODE (byte_offset) == INTEGER_CST
4429 && (field_size != NULL_TREE
4430 ? TREE_CODE (field_size) == INTEGER_CST
4431 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
4433 /* Compute bit offset of the field. */
4434 bitoffset = (wi::to_offset (field_offset)
4435 + wi::lshift (wi::to_offset (byte_offset),
4436 LOG2_BITS_PER_UNIT));
4437 /* Compute bit offset where the field ends. */
4438 if (field_size != NULL_TREE)
4439 bitoffset_end = bitoffset + wi::to_offset (field_size);
4440 else
4441 bitoffset_end = 0;
4443 access_end = offset_int (offset) + size;
4445 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
4446 [BITOFFSET, BITOFFSET_END)? */
4447 if (wi::cmps (access_end, bitoffset) > 0
4448 && (field_size == NULL_TREE
4449 || wi::lts_p (offset, bitoffset_end)))
4451 offset_int inner_offset = offset_int (offset) - bitoffset;
4452 /* We do have overlap. Now see if field is large enough to
4453 cover the access. Give up for accesses spanning multiple
4454 fields. */
4455 if (wi::cmps (access_end, bitoffset_end) > 0)
4456 return NULL_TREE;
4457 if (wi::lts_p (offset, bitoffset))
4458 return NULL_TREE;
4459 return fold_ctor_reference (type, cval,
4460 inner_offset.to_uhwi (), size,
4461 from_decl);
4464 /* When memory is not explicitely mentioned in constructor, it is 0. */
4465 return build_zero_cst (type);
4468 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
4469 to the memory at bit OFFSET. */
4471 static tree
4472 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
4473 unsigned HOST_WIDE_INT size, tree from_decl)
4475 tree ret;
4477 /* We found the field with exact match. */
4478 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
4479 && !offset)
4480 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
4482 /* We are at the end of walk, see if we can view convert the
4483 result. */
4484 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
4485 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
4486 && !compare_tree_int (TYPE_SIZE (type), size)
4487 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
4489 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
4490 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
4491 if (ret)
4492 STRIP_NOPS (ret);
4493 return ret;
4495 /* For constants and byte-aligned/sized reads try to go through
4496 native_encode/interpret. */
4497 if (CONSTANT_CLASS_P (ctor)
4498 && BITS_PER_UNIT == 8
4499 && offset % BITS_PER_UNIT == 0
4500 && size % BITS_PER_UNIT == 0
4501 && size <= MAX_BITSIZE_MODE_ANY_MODE)
4503 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
4504 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
4505 offset / BITS_PER_UNIT) > 0)
4506 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
4508 if (TREE_CODE (ctor) == CONSTRUCTOR)
4511 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
4512 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
4513 return fold_array_ctor_reference (type, ctor, offset, size,
4514 from_decl);
4515 else
4516 return fold_nonarray_ctor_reference (type, ctor, offset, size,
4517 from_decl);
4520 return NULL_TREE;
4523 /* Return the tree representing the element referenced by T if T is an
4524 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
4525 names using VALUEIZE. Return NULL_TREE otherwise. */
4527 tree
4528 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
4530 tree ctor, idx, base;
4531 HOST_WIDE_INT offset, size, max_size;
4532 tree tem;
4534 if (TREE_THIS_VOLATILE (t))
4535 return NULL_TREE;
4537 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4538 return get_symbol_constant_value (t);
4540 tem = fold_read_from_constant_string (t);
4541 if (tem)
4542 return tem;
4544 switch (TREE_CODE (t))
4546 case ARRAY_REF:
4547 case ARRAY_RANGE_REF:
4548 /* Constant indexes are handled well by get_base_constructor.
4549 Only special case variable offsets.
4550 FIXME: This code can't handle nested references with variable indexes
4551 (they will be handled only by iteration of ccp). Perhaps we can bring
4552 get_ref_base_and_extent here and make it use a valueize callback. */
4553 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
4554 && valueize
4555 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
4556 && TREE_CODE (idx) == INTEGER_CST)
4558 tree low_bound, unit_size;
4560 /* If the resulting bit-offset is constant, track it. */
4561 if ((low_bound = array_ref_low_bound (t),
4562 TREE_CODE (low_bound) == INTEGER_CST)
4563 && (unit_size = array_ref_element_size (t),
4564 tree_fits_uhwi_p (unit_size)))
4566 offset_int woffset
4567 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
4568 TYPE_PRECISION (TREE_TYPE (idx)));
4570 if (wi::fits_shwi_p (woffset))
4572 offset = woffset.to_shwi ();
4573 /* TODO: This code seems wrong, multiply then check
4574 to see if it fits. */
4575 offset *= tree_to_uhwi (unit_size);
4576 offset *= BITS_PER_UNIT;
4578 base = TREE_OPERAND (t, 0);
4579 ctor = get_base_constructor (base, &offset, valueize);
4580 /* Empty constructor. Always fold to 0. */
4581 if (ctor == error_mark_node)
4582 return build_zero_cst (TREE_TYPE (t));
4583 /* Out of bound array access. Value is undefined,
4584 but don't fold. */
4585 if (offset < 0)
4586 return NULL_TREE;
4587 /* We can not determine ctor. */
4588 if (!ctor)
4589 return NULL_TREE;
4590 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
4591 tree_to_uhwi (unit_size)
4592 * BITS_PER_UNIT,
4593 base);
4597 /* Fallthru. */
4599 case COMPONENT_REF:
4600 case BIT_FIELD_REF:
4601 case TARGET_MEM_REF:
4602 case MEM_REF:
4603 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
4604 ctor = get_base_constructor (base, &offset, valueize);
4606 /* Empty constructor. Always fold to 0. */
4607 if (ctor == error_mark_node)
4608 return build_zero_cst (TREE_TYPE (t));
4609 /* We do not know precise address. */
4610 if (max_size == -1 || max_size != size)
4611 return NULL_TREE;
4612 /* We can not determine ctor. */
4613 if (!ctor)
4614 return NULL_TREE;
4616 /* Out of bound array access. Value is undefined, but don't fold. */
4617 if (offset < 0)
4618 return NULL_TREE;
4620 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
4621 base);
4623 case REALPART_EXPR:
4624 case IMAGPART_EXPR:
4626 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
4627 if (c && TREE_CODE (c) == COMPLEX_CST)
4628 return fold_build1_loc (EXPR_LOCATION (t),
4629 TREE_CODE (t), TREE_TYPE (t), c);
4630 break;
4633 default:
4634 break;
4637 return NULL_TREE;
4640 tree
4641 fold_const_aggregate_ref (tree t)
4643 return fold_const_aggregate_ref_1 (t, NULL);
4646 /* Lookup virtual method with index TOKEN in a virtual table V
4647 at OFFSET.
4648 Set CAN_REFER if non-NULL to false if method
4649 is not referable or if the virtual table is ill-formed (such as rewriten
4650 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
4652 tree
4653 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
4654 tree v,
4655 unsigned HOST_WIDE_INT offset,
4656 bool *can_refer)
4658 tree vtable = v, init, fn;
4659 unsigned HOST_WIDE_INT size;
4660 unsigned HOST_WIDE_INT elt_size, access_index;
4661 tree domain_type;
4663 if (can_refer)
4664 *can_refer = true;
4666 /* First of all double check we have virtual table. */
4667 if (TREE_CODE (v) != VAR_DECL
4668 || !DECL_VIRTUAL_P (v))
4670 gcc_assert (in_lto_p);
4671 /* Pass down that we lost track of the target. */
4672 if (can_refer)
4673 *can_refer = false;
4674 return NULL_TREE;
4677 init = ctor_for_folding (v);
4679 /* The virtual tables should always be born with constructors
4680 and we always should assume that they are avaialble for
4681 folding. At the moment we do not stream them in all cases,
4682 but it should never happen that ctor seem unreachable. */
4683 gcc_assert (init);
4684 if (init == error_mark_node)
4686 gcc_assert (in_lto_p);
4687 /* Pass down that we lost track of the target. */
4688 if (can_refer)
4689 *can_refer = false;
4690 return NULL_TREE;
4692 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
4693 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
4694 offset *= BITS_PER_UNIT;
4695 offset += token * size;
4697 /* Lookup the value in the constructor that is assumed to be array.
4698 This is equivalent to
4699 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
4700 offset, size, NULL);
4701 but in a constant time. We expect that frontend produced a simple
4702 array without indexed initializers. */
4704 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
4705 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
4706 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
4707 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
4709 access_index = offset / BITS_PER_UNIT / elt_size;
4710 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
4712 /* This code makes an assumption that there are no
4713 indexed fileds produced by C++ FE, so we can directly index the array. */
4714 if (access_index < CONSTRUCTOR_NELTS (init))
4716 fn = CONSTRUCTOR_ELT (init, access_index)->value;
4717 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
4718 STRIP_NOPS (fn);
4720 else
4721 fn = NULL;
4723 /* For type inconsistent program we may end up looking up virtual method
4724 in virtual table that does not contain TOKEN entries. We may overrun
4725 the virtual table and pick up a constant or RTTI info pointer.
4726 In any case the call is undefined. */
4727 if (!fn
4728 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
4729 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
4730 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4731 else
4733 fn = TREE_OPERAND (fn, 0);
4735 /* When cgraph node is missing and function is not public, we cannot
4736 devirtualize. This can happen in WHOPR when the actual method
4737 ends up in other partition, because we found devirtualization
4738 possibility too late. */
4739 if (!can_refer_decl_in_current_unit_p (fn, vtable))
4741 if (can_refer)
4743 *can_refer = false;
4744 return fn;
4746 return NULL_TREE;
4750 /* Make sure we create a cgraph node for functions we'll reference.
4751 They can be non-existent if the reference comes from an entry
4752 of an external vtable for example. */
4753 cgraph_node::get_create (fn);
4755 return fn;
4758 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
4759 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
4760 KNOWN_BINFO carries the binfo describing the true type of
4761 OBJ_TYPE_REF_OBJECT(REF).
4762 Set CAN_REFER if non-NULL to false if method
4763 is not referable or if the virtual table is ill-formed (such as rewriten
4764 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
4766 tree
4767 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
4768 bool *can_refer)
4770 unsigned HOST_WIDE_INT offset;
4771 tree v;
4773 v = BINFO_VTABLE (known_binfo);
4774 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
4775 if (!v)
4776 return NULL_TREE;
4778 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
4780 if (can_refer)
4781 *can_refer = false;
4782 return NULL_TREE;
4784 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
4787 /* Return true iff VAL is a gimple expression that is known to be
4788 non-negative. Restricted to floating-point inputs. */
4790 bool
4791 gimple_val_nonnegative_real_p (tree val)
4793 gimple def_stmt;
4795 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
4797 /* Use existing logic for non-gimple trees. */
4798 if (tree_expr_nonnegative_p (val))
4799 return true;
4801 if (TREE_CODE (val) != SSA_NAME)
4802 return false;
4804 /* Currently we look only at the immediately defining statement
4805 to make this determination, since recursion on defining
4806 statements of operands can lead to quadratic behavior in the
4807 worst case. This is expected to catch almost all occurrences
4808 in practice. It would be possible to implement limited-depth
4809 recursion if important cases are lost. Alternatively, passes
4810 that need this information (such as the pow/powi lowering code
4811 in the cse_sincos pass) could be revised to provide it through
4812 dataflow propagation. */
4814 def_stmt = SSA_NAME_DEF_STMT (val);
4816 if (is_gimple_assign (def_stmt))
4818 tree op0, op1;
4820 /* See fold-const.c:tree_expr_nonnegative_p for additional
4821 cases that could be handled with recursion. */
4823 switch (gimple_assign_rhs_code (def_stmt))
4825 case ABS_EXPR:
4826 /* Always true for floating-point operands. */
4827 return true;
4829 case MULT_EXPR:
4830 /* True if the two operands are identical (since we are
4831 restricted to floating-point inputs). */
4832 op0 = gimple_assign_rhs1 (def_stmt);
4833 op1 = gimple_assign_rhs2 (def_stmt);
4835 if (op0 == op1
4836 || operand_equal_p (op0, op1, 0))
4837 return true;
4839 default:
4840 return false;
4843 else if (is_gimple_call (def_stmt))
4845 tree fndecl = gimple_call_fndecl (def_stmt);
4846 if (fndecl
4847 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4849 tree arg1;
4851 switch (DECL_FUNCTION_CODE (fndecl))
4853 CASE_FLT_FN (BUILT_IN_ACOS):
4854 CASE_FLT_FN (BUILT_IN_ACOSH):
4855 CASE_FLT_FN (BUILT_IN_CABS):
4856 CASE_FLT_FN (BUILT_IN_COSH):
4857 CASE_FLT_FN (BUILT_IN_ERFC):
4858 CASE_FLT_FN (BUILT_IN_EXP):
4859 CASE_FLT_FN (BUILT_IN_EXP10):
4860 CASE_FLT_FN (BUILT_IN_EXP2):
4861 CASE_FLT_FN (BUILT_IN_FABS):
4862 CASE_FLT_FN (BUILT_IN_FDIM):
4863 CASE_FLT_FN (BUILT_IN_HYPOT):
4864 CASE_FLT_FN (BUILT_IN_POW10):
4865 return true;
4867 CASE_FLT_FN (BUILT_IN_SQRT):
4868 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
4869 nonnegative inputs. */
4870 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
4871 return true;
4873 break;
4875 CASE_FLT_FN (BUILT_IN_POWI):
4876 /* True if the second argument is an even integer. */
4877 arg1 = gimple_call_arg (def_stmt, 1);
4879 if (TREE_CODE (arg1) == INTEGER_CST
4880 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
4881 return true;
4883 break;
4885 CASE_FLT_FN (BUILT_IN_POW):
4886 /* True if the second argument is an even integer-valued
4887 real. */
4888 arg1 = gimple_call_arg (def_stmt, 1);
4890 if (TREE_CODE (arg1) == REAL_CST)
4892 REAL_VALUE_TYPE c;
4893 HOST_WIDE_INT n;
4895 c = TREE_REAL_CST (arg1);
4896 n = real_to_integer (&c);
4898 if ((n & 1) == 0)
4900 REAL_VALUE_TYPE cint;
4901 real_from_integer (&cint, VOIDmode, n, SIGNED);
4902 if (real_identical (&c, &cint))
4903 return true;
4907 break;
4909 default:
4910 return false;
4915 return false;
4918 /* Given a pointer value OP0, return a simplified version of an
4919 indirection through OP0, or NULL_TREE if no simplification is
4920 possible. Note that the resulting type may be different from
4921 the type pointed to in the sense that it is still compatible
4922 from the langhooks point of view. */
4924 tree
4925 gimple_fold_indirect_ref (tree t)
4927 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
4928 tree sub = t;
4929 tree subtype;
4931 STRIP_NOPS (sub);
4932 subtype = TREE_TYPE (sub);
4933 if (!POINTER_TYPE_P (subtype))
4934 return NULL_TREE;
4936 if (TREE_CODE (sub) == ADDR_EXPR)
4938 tree op = TREE_OPERAND (sub, 0);
4939 tree optype = TREE_TYPE (op);
4940 /* *&p => p */
4941 if (useless_type_conversion_p (type, optype))
4942 return op;
4944 /* *(foo *)&fooarray => fooarray[0] */
4945 if (TREE_CODE (optype) == ARRAY_TYPE
4946 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
4947 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4949 tree type_domain = TYPE_DOMAIN (optype);
4950 tree min_val = size_zero_node;
4951 if (type_domain && TYPE_MIN_VALUE (type_domain))
4952 min_val = TYPE_MIN_VALUE (type_domain);
4953 if (TREE_CODE (min_val) == INTEGER_CST)
4954 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
4956 /* *(foo *)&complexfoo => __real__ complexfoo */
4957 else if (TREE_CODE (optype) == COMPLEX_TYPE
4958 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4959 return fold_build1 (REALPART_EXPR, type, op);
4960 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
4961 else if (TREE_CODE (optype) == VECTOR_TYPE
4962 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4964 tree part_width = TYPE_SIZE (type);
4965 tree index = bitsize_int (0);
4966 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
4970 /* *(p + CST) -> ... */
4971 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4972 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4974 tree addr = TREE_OPERAND (sub, 0);
4975 tree off = TREE_OPERAND (sub, 1);
4976 tree addrtype;
4978 STRIP_NOPS (addr);
4979 addrtype = TREE_TYPE (addr);
4981 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
4982 if (TREE_CODE (addr) == ADDR_EXPR
4983 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
4984 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
4985 && tree_fits_uhwi_p (off))
4987 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
4988 tree part_width = TYPE_SIZE (type);
4989 unsigned HOST_WIDE_INT part_widthi
4990 = tree_to_shwi (part_width) / BITS_PER_UNIT;
4991 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
4992 tree index = bitsize_int (indexi);
4993 if (offset / part_widthi
4994 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
4995 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
4996 part_width, index);
4999 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5000 if (TREE_CODE (addr) == ADDR_EXPR
5001 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5002 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5004 tree size = TYPE_SIZE_UNIT (type);
5005 if (tree_int_cst_equal (size, off))
5006 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5009 /* *(p + CST) -> MEM_REF <p, CST>. */
5010 if (TREE_CODE (addr) != ADDR_EXPR
5011 || DECL_P (TREE_OPERAND (addr, 0)))
5012 return fold_build2 (MEM_REF, type,
5013 addr,
5014 wide_int_to_tree (ptype, off));
5017 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5018 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5019 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5020 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5022 tree type_domain;
5023 tree min_val = size_zero_node;
5024 tree osub = sub;
5025 sub = gimple_fold_indirect_ref (sub);
5026 if (! sub)
5027 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5028 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5029 if (type_domain && TYPE_MIN_VALUE (type_domain))
5030 min_val = TYPE_MIN_VALUE (type_domain);
5031 if (TREE_CODE (min_val) == INTEGER_CST)
5032 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5035 return NULL_TREE;
5038 /* Return true if CODE is an operation that when operating on signed
5039 integer types involves undefined behavior on overflow and the
5040 operation can be expressed with unsigned arithmetic. */
5042 bool
5043 arith_code_with_undefined_signed_overflow (tree_code code)
5045 switch (code)
5047 case PLUS_EXPR:
5048 case MINUS_EXPR:
5049 case MULT_EXPR:
5050 case NEGATE_EXPR:
5051 case POINTER_PLUS_EXPR:
5052 return true;
5053 default:
5054 return false;
5058 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5059 operation that can be transformed to unsigned arithmetic by converting
5060 its operand, carrying out the operation in the corresponding unsigned
5061 type and converting the result back to the original type.
5063 Returns a sequence of statements that replace STMT and also contain
5064 a modified form of STMT itself. */
5066 gimple_seq
5067 rewrite_to_defined_overflow (gimple stmt)
5069 if (dump_file && (dump_flags & TDF_DETAILS))
5071 fprintf (dump_file, "rewriting stmt with undefined signed "
5072 "overflow ");
5073 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5076 tree lhs = gimple_assign_lhs (stmt);
5077 tree type = unsigned_type_for (TREE_TYPE (lhs));
5078 gimple_seq stmts = NULL;
5079 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
5081 gimple_seq stmts2 = NULL;
5082 gimple_set_op (stmt, i,
5083 force_gimple_operand (fold_convert (type,
5084 gimple_op (stmt, i)),
5085 &stmts2, true, NULL_TREE));
5086 gimple_seq_add_seq (&stmts, stmts2);
5088 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
5089 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
5090 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
5091 gimple_seq_add_stmt (&stmts, stmt);
5092 gimple cvt = gimple_build_assign_with_ops
5093 (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
5094 gimple_seq_add_stmt (&stmts, cvt);
5096 return stmts;