ipa-utils.h (polymorphic_call_context): Add methods dump, debug and clear_outer_type.
[official-gcc.git] / gcc / gimple-fold.c
blob8ac22113387de9987ea7a95f44d60b5627b052db
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "stringpool.h"
27 #include "expr.h"
28 #include "stmt.h"
29 #include "stor-layout.h"
30 #include "flags.h"
31 #include "function.h"
32 #include "dumpfile.h"
33 #include "bitmap.h"
34 #include "basic-block.h"
35 #include "tree-ssa-alias.h"
36 #include "internal-fn.h"
37 #include "gimple-fold.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimple-ssa.h"
44 #include "tree-ssanames.h"
45 #include "tree-into-ssa.h"
46 #include "tree-dfa.h"
47 #include "tree-ssa.h"
48 #include "tree-ssa-propagate.h"
49 #include "target.h"
50 #include "ipa-utils.h"
51 #include "gimple-pretty-print.h"
52 #include "tree-ssa-address.h"
53 #include "langhooks.h"
54 #include "gimplify-me.h"
55 #include "dbgcnt.h"
56 #include "builtins.h"
57 #include "output.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract decls (e.g. abstract origins of inlines) never have a body
     to reference.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      /* A function inlined into another has no standalone body we could
	 refer to.  */
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite PTR p+ CST into &MEM[ptr + CST] so the result is an ADDR_EXPR
     that is_gimple_min_invariant accepts.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Strip the compound literal wrapper down to its backing decl.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up when the referenced decl is not available in this unit
	 (see can_refer_decl_in_current_unit_p for the cases).  */
      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  /* ctor_for_folding returns error_mark_node when the initializer must
     not be used for folding at all.  */
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
251 /* Subroutine of fold_stmt. We perform several simplifications of the
252 memory reference tree EXPR and make sure to re-gimplify them properly
253 after propagation of constant addresses. IS_LHS is true if the
254 reference is supposed to be an lvalue. */
256 static tree
257 maybe_fold_reference (tree expr, bool is_lhs)
259 tree result;
261 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
262 || TREE_CODE (expr) == REALPART_EXPR
263 || TREE_CODE (expr) == IMAGPART_EXPR)
264 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
265 return fold_unary_loc (EXPR_LOCATION (expr),
266 TREE_CODE (expr),
267 TREE_TYPE (expr),
268 TREE_OPERAND (expr, 0));
269 else if (TREE_CODE (expr) == BIT_FIELD_REF
270 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
271 return fold_ternary_loc (EXPR_LOCATION (expr),
272 TREE_CODE (expr),
273 TREE_TYPE (expr),
274 TREE_OPERAND (expr, 0),
275 TREE_OPERAND (expr, 1),
276 TREE_OPERAND (expr, 2));
278 if (!is_lhs
279 && (result = fold_const_aggregate_ref (expr))
280 && is_gimple_min_invariant (result))
281 return result;
283 return NULL_TREE;
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* Devirtualize only when the set of possible targets is
		   known to be complete and has at most one entry.  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p + 0] simplifies to p (with a type fixup).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }
	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }
	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not.of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
544 /* Attempt to fold a conditional statement. Return true if any changes were
545 made. We only attempt to fold the condition expression, and do not perform
546 any transformation that would require alteration of the cfg. It is
547 assumed that the operands have been previously folded. */
549 static bool
550 fold_gimple_cond (gimple stmt)
552 tree result = fold_binary_loc (gimple_location (stmt),
553 gimple_cond_code (stmt),
554 boolean_type_node,
555 gimple_cond_lhs (stmt),
556 gimple_cond_rhs (stmt));
558 if (result)
560 STRIP_USELESS_TYPE_CONVERSION (result);
561 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
563 gimple_cond_set_condition_from_tree (stmt, result);
564 return true;
568 return false;
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF when it is a store or a call that may
	 clobber memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence inherits the VDEF of the
	     replaced statement; earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace (si_p, stmts, false);
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary and assign the temporary to the
	 original call's LHS.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
701 /* Replace the call at *GSI with the gimple value VAL. */
703 static void
704 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
706 gimple stmt = gsi_stmt (*gsi);
707 tree lhs = gimple_call_lhs (stmt);
708 gimple repl;
709 if (lhs)
711 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
712 val = fold_convert (TREE_TYPE (lhs), val);
713 repl = gimple_build_assign (lhs, val);
715 else
716 repl = gimple_build_nop ();
717 tree vdef = gimple_vdef (stmt);
718 if (vdef && TREE_CODE (vdef) == SSA_NAME)
720 unlink_stmt_vdef (stmt);
721 release_ssa_name (vdef);
723 gsi_replace (gsi, repl, true);
726 /* Replace the call at *GSI with the new call REPL and fold that
727 again. */
729 static void
730 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
732 gimple stmt = gsi_stmt (*gsi);
733 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
734 gimple_set_location (repl, gimple_location (stmt));
735 if (gimple_vdef (stmt)
736 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
738 gimple_set_vdef (repl, gimple_vdef (stmt));
739 gimple_set_vuse (repl, gimple_vuse (stmt));
740 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
742 gsi_replace (gsi, repl, true);
743 fold_stmt (gsi);
746 /* Return true if VAR is a VAR_DECL or a component thereof. */
748 static bool
749 var_decl_component_p (tree var)
751 tree inner = var;
752 while (handled_component_p (inner))
753 inner = TREE_OPERAND (inner, 0);
754 return SSA_VAR_P (inner);
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */
765 static bool
766 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
767 tree dest, tree src, int endp)
769 gimple stmt = gsi_stmt (*gsi);
770 tree lhs = gimple_call_lhs (stmt);
771 tree len = gimple_call_arg (stmt, 2);
772 tree destvar, srcvar;
773 location_t loc = gimple_location (stmt);
775 /* If the LEN parameter is zero, return DEST. */
776 if (integer_zerop (len))
778 gimple repl;
779 if (gimple_call_lhs (stmt))
780 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
781 else
782 repl = gimple_build_nop ();
783 tree vdef = gimple_vdef (stmt);
784 if (vdef && TREE_CODE (vdef) == SSA_NAME)
786 unlink_stmt_vdef (stmt);
787 release_ssa_name (vdef);
789 gsi_replace (gsi, repl, true);
790 return true;
793 /* If SRC and DEST are the same (and not volatile), return
794 DEST{,+LEN,+LEN-1}. */
795 if (operand_equal_p (src, dest, 0))
797 unlink_stmt_vdef (stmt);
798 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
799 release_ssa_name (gimple_vdef (stmt));
800 if (!lhs)
802 gsi_replace (gsi, gimple_build_nop (), true);
803 return true;
805 goto done;
807 else
809 tree srctype, desttype;
810 unsigned int src_align, dest_align;
811 tree off0;
813 /* Build accesses at offset zero with a ref-all character type. */
814 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
815 ptr_mode, true), 0);
817 /* If we can perform the copy efficiently with first doing all loads
818 and then all stores inline it that way. Currently efficiently
819 means that we can load all the memory into a single integer
820 register which is what MOVE_MAX gives us. */
821 src_align = get_pointer_alignment (src);
822 dest_align = get_pointer_alignment (dest);
823 if (tree_fits_uhwi_p (len)
824 && compare_tree_int (len, MOVE_MAX) <= 0
825 /* ??? Don't transform copies from strings with known length this
826 confuses the tree-ssa-strlen.c. This doesn't handle
827 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
828 reason. */
829 && !c_strlen (src, 2))
831 unsigned ilen = tree_to_uhwi (len);
832 if (exact_log2 (ilen) != -1)
834 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
835 if (type
836 && TYPE_MODE (type) != BLKmode
837 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
838 == ilen * 8)
839 /* If the destination pointer is not aligned we must be able
840 to emit an unaligned store. */
841 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
842 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
844 tree srctype = type;
845 tree desttype = type;
846 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
847 srctype = build_aligned_type (type, src_align);
848 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
849 tree tem = fold_const_aggregate_ref (srcmem);
850 if (tem)
851 srcmem = tem;
852 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
853 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
854 src_align))
855 srcmem = NULL_TREE;
856 if (srcmem)
858 gimple new_stmt;
859 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
861 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
862 if (gimple_in_ssa_p (cfun))
863 srcmem = make_ssa_name (TREE_TYPE (srcmem),
864 new_stmt);
865 else
866 srcmem = create_tmp_reg (TREE_TYPE (srcmem),
867 NULL);
868 gimple_assign_set_lhs (new_stmt, srcmem);
869 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
870 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
872 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
873 desttype = build_aligned_type (type, dest_align);
874 new_stmt
875 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
876 dest, off0),
877 srcmem);
878 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
879 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
880 if (gimple_vdef (new_stmt)
881 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
882 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
883 if (!lhs)
885 gsi_replace (gsi, new_stmt, true);
886 return true;
888 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
889 goto done;
895 if (endp == 3)
897 /* Both DEST and SRC must be pointer types.
898 ??? This is what old code did. Is the testing for pointer types
899 really mandatory?
901 If either SRC is readonly or length is 1, we can use memcpy. */
902 if (!dest_align || !src_align)
903 return false;
904 if (readonly_data_expr (src)
905 || (tree_fits_uhwi_p (len)
906 && (MIN (src_align, dest_align) / BITS_PER_UNIT
907 >= tree_to_uhwi (len))))
909 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
910 if (!fn)
911 return false;
912 gimple_call_set_fndecl (stmt, fn);
913 gimple_call_set_arg (stmt, 0, dest);
914 gimple_call_set_arg (stmt, 1, src);
915 fold_stmt (gsi);
916 return true;
919 /* If *src and *dest can't overlap, optimize into memcpy as well. */
920 if (TREE_CODE (src) == ADDR_EXPR
921 && TREE_CODE (dest) == ADDR_EXPR)
923 tree src_base, dest_base, fn;
924 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
925 HOST_WIDE_INT size = -1;
926 HOST_WIDE_INT maxsize = -1;
928 srcvar = TREE_OPERAND (src, 0);
929 src_base = get_ref_base_and_extent (srcvar, &src_offset,
930 &size, &maxsize);
931 destvar = TREE_OPERAND (dest, 0);
932 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
933 &size, &maxsize);
934 if (tree_fits_uhwi_p (len))
935 maxsize = tree_to_uhwi (len);
936 else
937 maxsize = -1;
938 src_offset /= BITS_PER_UNIT;
939 dest_offset /= BITS_PER_UNIT;
940 if (SSA_VAR_P (src_base)
941 && SSA_VAR_P (dest_base))
943 if (operand_equal_p (src_base, dest_base, 0)
944 && ranges_overlap_p (src_offset, maxsize,
945 dest_offset, maxsize))
946 return false;
948 else if (TREE_CODE (src_base) == MEM_REF
949 && TREE_CODE (dest_base) == MEM_REF)
951 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
952 TREE_OPERAND (dest_base, 0), 0))
953 return false;
954 offset_int off = mem_ref_offset (src_base) + src_offset;
955 if (!wi::fits_shwi_p (off))
956 return false;
957 src_offset = off.to_shwi ();
959 off = mem_ref_offset (dest_base) + dest_offset;
960 if (!wi::fits_shwi_p (off))
961 return false;
962 dest_offset = off.to_shwi ();
963 if (ranges_overlap_p (src_offset, maxsize,
964 dest_offset, maxsize))
965 return false;
967 else
968 return false;
970 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
971 if (!fn)
972 return false;
973 gimple_call_set_fndecl (stmt, fn);
974 gimple_call_set_arg (stmt, 0, dest);
975 gimple_call_set_arg (stmt, 1, src);
976 fold_stmt (gsi);
977 return true;
980 /* If the destination and source do not alias optimize into
981 memcpy as well. */
982 if ((is_gimple_min_invariant (dest)
983 || TREE_CODE (dest) == SSA_NAME)
984 && (is_gimple_min_invariant (src)
985 || TREE_CODE (src) == SSA_NAME))
987 ao_ref destr, srcr;
988 ao_ref_init_from_ptr_and_size (&destr, dest, len);
989 ao_ref_init_from_ptr_and_size (&srcr, src, len);
990 if (!refs_may_alias_p_1 (&destr, &srcr, false))
992 tree fn;
993 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
994 if (!fn)
995 return false;
996 gimple_call_set_fndecl (stmt, fn);
997 gimple_call_set_arg (stmt, 0, dest);
998 gimple_call_set_arg (stmt, 1, src);
999 fold_stmt (gsi);
1000 return true;
1004 return false;
1007 if (!tree_fits_shwi_p (len))
1008 return false;
1009 /* FIXME:
1010 This logic lose for arguments like (type *)malloc (sizeof (type)),
1011 since we strip the casts of up to VOID return value from malloc.
1012 Perhaps we ought to inherit type from non-VOID argument here? */
1013 STRIP_NOPS (src);
1014 STRIP_NOPS (dest);
1015 if (!POINTER_TYPE_P (TREE_TYPE (src))
1016 || !POINTER_TYPE_P (TREE_TYPE (dest)))
1017 return false;
1018 /* In the following try to find a type that is most natural to be
1019 used for the memcpy source and destination and that allows
1020 the most optimization when memcpy is turned into a plain assignment
1021 using that type. In theory we could always use a char[len] type
1022 but that only gains us that the destination and source possibly
1023 no longer will have their address taken. */
1024 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1025 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1027 tree tem = TREE_OPERAND (src, 0);
1028 STRIP_NOPS (tem);
1029 if (tem != TREE_OPERAND (src, 0))
1030 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1032 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1034 tree tem = TREE_OPERAND (dest, 0);
1035 STRIP_NOPS (tem);
1036 if (tem != TREE_OPERAND (dest, 0))
1037 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1039 srctype = TREE_TYPE (TREE_TYPE (src));
1040 if (TREE_CODE (srctype) == ARRAY_TYPE
1041 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1043 srctype = TREE_TYPE (srctype);
1044 STRIP_NOPS (src);
1045 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1047 desttype = TREE_TYPE (TREE_TYPE (dest));
1048 if (TREE_CODE (desttype) == ARRAY_TYPE
1049 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1051 desttype = TREE_TYPE (desttype);
1052 STRIP_NOPS (dest);
1053 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1055 if (TREE_ADDRESSABLE (srctype)
1056 || TREE_ADDRESSABLE (desttype))
1057 return false;
1059 /* Make sure we are not copying using a floating-point mode or
1060 a type whose size possibly does not match its precision. */
1061 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1062 || TREE_CODE (desttype) == BOOLEAN_TYPE
1063 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1064 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1065 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1066 || TREE_CODE (srctype) == BOOLEAN_TYPE
1067 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1068 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1069 if (!srctype)
1070 srctype = desttype;
1071 if (!desttype)
1072 desttype = srctype;
1073 if (!srctype)
1074 return false;
1076 src_align = get_pointer_alignment (src);
1077 dest_align = get_pointer_alignment (dest);
1078 if (dest_align < TYPE_ALIGN (desttype)
1079 || src_align < TYPE_ALIGN (srctype))
1080 return false;
1082 destvar = dest;
1083 STRIP_NOPS (destvar);
1084 if (TREE_CODE (destvar) == ADDR_EXPR
1085 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1086 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1087 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1088 else
1089 destvar = NULL_TREE;
1091 srcvar = src;
1092 STRIP_NOPS (srcvar);
1093 if (TREE_CODE (srcvar) == ADDR_EXPR
1094 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1095 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1097 if (!destvar
1098 || src_align >= TYPE_ALIGN (desttype))
1099 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1100 srcvar, off0);
1101 else if (!STRICT_ALIGNMENT)
1103 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1104 src_align);
1105 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1107 else
1108 srcvar = NULL_TREE;
1110 else
1111 srcvar = NULL_TREE;
1113 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1114 return false;
1116 if (srcvar == NULL_TREE)
1118 STRIP_NOPS (src);
1119 if (src_align >= TYPE_ALIGN (desttype))
1120 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1121 else
1123 if (STRICT_ALIGNMENT)
1124 return false;
1125 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1126 src_align);
1127 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1130 else if (destvar == NULL_TREE)
1132 STRIP_NOPS (dest);
1133 if (dest_align >= TYPE_ALIGN (srctype))
1134 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1135 else
1137 if (STRICT_ALIGNMENT)
1138 return false;
1139 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1140 dest_align);
1141 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1145 gimple new_stmt;
1146 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1148 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1149 if (gimple_in_ssa_p (cfun))
1150 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1151 else
1152 srcvar = create_tmp_reg (TREE_TYPE (srcvar), NULL);
1153 gimple_assign_set_lhs (new_stmt, srcvar);
1154 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1155 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1157 new_stmt = gimple_build_assign (destvar, srcvar);
1158 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1159 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1160 if (gimple_vdef (new_stmt)
1161 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1162 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1163 if (!lhs)
1165 gsi_replace (gsi, new_stmt, true);
1166 return true;
1168 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1171 done:
1172 if (endp == 0 || endp == 3)
1173 len = NULL_TREE;
1174 else if (endp == 2)
1175 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1176 ssize_int (1));
1177 if (endp == 2 || endp == 1)
1178 dest = fold_build_pointer_plus_loc (loc, dest, len);
1180 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1181 GSI_SAME_STMT);
1182 gimple repl = gimple_build_assign (lhs, dest);
1183 gsi_replace (gsi, repl, true);
1184 return true;
1187 /* Fold function call to builtin memset or bzero at *GSI setting the
1188 memory of size LEN to VAL. Return whether a simplification was made. */
1190 static bool
1191 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1193 gimple stmt = gsi_stmt (*gsi);
1194 tree etype;
1195 unsigned HOST_WIDE_INT length, cval;
1197 /* If the LEN parameter is zero, return DEST. */
1198 if (integer_zerop (len))
1200 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1201 return true;
1204 if (! tree_fits_uhwi_p (len))
1205 return false;
1207 if (TREE_CODE (c) != INTEGER_CST)
1208 return false;
1210 tree dest = gimple_call_arg (stmt, 0);
1211 tree var = dest;
1212 if (TREE_CODE (var) != ADDR_EXPR)
1213 return false;
1215 var = TREE_OPERAND (var, 0);
1216 if (TREE_THIS_VOLATILE (var))
1217 return false;
1219 etype = TREE_TYPE (var);
1220 if (TREE_CODE (etype) == ARRAY_TYPE)
1221 etype = TREE_TYPE (etype);
1223 if (!INTEGRAL_TYPE_P (etype)
1224 && !POINTER_TYPE_P (etype))
1225 return NULL_TREE;
1227 if (! var_decl_component_p (var))
1228 return NULL_TREE;
1230 length = tree_to_uhwi (len);
1231 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1232 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1233 return NULL_TREE;
1235 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1236 return NULL_TREE;
1238 if (integer_zerop (c))
1239 cval = 0;
1240 else
1242 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1243 return NULL_TREE;
1245 cval = TREE_INT_CST_LOW (c);
1246 cval &= 0xff;
1247 cval |= cval << 8;
1248 cval |= cval << 16;
1249 cval |= (cval << 31) << 1;
1252 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1253 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1254 gimple_set_vuse (store, gimple_vuse (stmt));
1255 tree vdef = gimple_vdef (stmt);
1256 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1258 gimple_set_vdef (store, gimple_vdef (stmt));
1259 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1261 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1262 if (gimple_call_lhs (stmt))
1264 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1265 gsi_replace (gsi, asgn, true);
1267 else
1269 gimple_stmt_iterator gsi2 = *gsi;
1270 gsi_prev (gsi);
1271 gsi_remove (&gsi2, true);
1274 return true;
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.
   VISITED is lazily allocated on the first SSA name encountered and
   must be freed by the caller (see the public overload below).  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* TYPE == 2 asks for the maximum value: only a non-negative
	     integer constant qualifies.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  /* Merge with a previously determined value.  */
	  if (type > 0)
	    {
	      /* For maximum queries keep the larger of the two
		 constants.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* For exact length (TYPE == 0) all sources must agree.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    /* Both arms of the conditional must yield a usable
	       length/value.  */
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
1404 tree
1405 get_maxval_strlen (tree arg, int type)
1407 bitmap visited = NULL;
1408 tree len = NULL_TREE;
1409 if (!get_maxval_strlen (arg, &len, &visited, type))
1410 len = NULL_TREE;
1411 if (visited)
1412 BITMAP_FREE (visited);
1414 return len;
1418 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1419 If LEN is not NULL, it represents the length of the string to be
1420 copied. Return NULL_TREE if no simplification can be made. */
1422 static bool
1423 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1424 tree dest, tree src)
1426 location_t loc = gimple_location (gsi_stmt (*gsi));
1427 tree fn;
1429 /* If SRC and DEST are the same (and not volatile), return DEST. */
1430 if (operand_equal_p (src, dest, 0))
1432 replace_call_with_value (gsi, dest);
1433 return true;
1436 if (optimize_function_for_size_p (cfun))
1437 return false;
1439 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1440 if (!fn)
1441 return false;
1443 tree len = get_maxval_strlen (src, 0);
1444 if (!len)
1445 return false;
1447 len = fold_convert_loc (loc, size_type_node, len);
1448 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1449 len = force_gimple_operand_gsi (gsi, len, true,
1450 NULL_TREE, true, GSI_SAME_STMT);
1451 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1452 replace_call_with_call_and_fold (gsi, repl);
1453 return true;
1456 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1457 If SLEN is not NULL, it represents the length of the source string.
1458 Return NULL_TREE if no simplification can be made. */
1460 static bool
1461 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1462 tree dest, tree src, tree len)
1464 location_t loc = gimple_location (gsi_stmt (*gsi));
1465 tree fn;
1467 /* If the LEN parameter is zero, return DEST. */
1468 if (integer_zerop (len))
1470 replace_call_with_value (gsi, dest);
1471 return true;
1474 /* We can't compare slen with len as constants below if len is not a
1475 constant. */
1476 if (TREE_CODE (len) != INTEGER_CST)
1477 return false;
1479 /* Now, we must be passed a constant src ptr parameter. */
1480 tree slen = get_maxval_strlen (src, 0);
1481 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1482 return false;
1484 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1486 /* We do not support simplification of this case, though we do
1487 support it when expanding trees into RTL. */
1488 /* FIXME: generate a call to __builtin_memset. */
1489 if (tree_int_cst_lt (slen, len))
1490 return false;
1492 /* OK transform into builtin memcpy. */
1493 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1494 if (!fn)
1495 return false;
1497 len = fold_convert_loc (loc, size_type_node, len);
1498 len = force_gimple_operand_gsi (gsi, len, true,
1499 NULL_TREE, true, GSI_SAME_STMT);
1500 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1501 replace_call_with_call_and_fold (gsi, repl);
1502 return true;
/* Simplify a call to the strcat builtin at *GSI.  DST and SRC are the
   arguments to the call.

   When SRC is the empty string the call folds to DST.  Otherwise, when
   the length of SRC is known and we optimize for speed, the call is
   split into strlen (DST) followed by memcpy into DST + strlen (DST).
   Return true iff a simplification was made.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy expansion is larger, so only do it when
     optimizing this block for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node, NULL);
  else
    newdst = create_tmp_reg (size_type_node, NULL);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (src) + 1 bytes, i.e. including the terminating NUL.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; forward it to the call's LHS.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1600 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1601 are the arguments to the call. */
1603 static bool
1604 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1606 gimple stmt = gsi_stmt (*gsi);
1607 tree dest = gimple_call_arg (stmt, 0);
1608 tree src = gimple_call_arg (stmt, 1);
1609 tree size = gimple_call_arg (stmt, 2);
1610 tree fn;
1611 const char *p;
1614 p = c_getstr (src);
1615 /* If the SRC parameter is "", return DEST. */
1616 if (p && *p == '\0')
1618 replace_call_with_value (gsi, dest);
1619 return true;
1622 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1623 return false;
1625 /* If __builtin_strcat_chk is used, assume strcat is available. */
1626 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1627 if (!fn)
1628 return false;
1630 gimple repl = gimple_build_call (fn, 2, dest, src);
1631 replace_call_with_call_and_fold (gsi, repl);
1632 return true;
/* Fold a call to the fputs builtin at *GSI.  ARG0 and ARG1 are the
   arguments to the call (the string and the stream).  UNLOCKED is true
   if this is actually a call to fputs_unlocked.  Depending on the
   known length of ARG0 the call is deleted, turned into fputc, or
   turned into fwrite.  Return true iff a simplification was made.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					     (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH: the single character is not a known constant, so
	 treat it like the general fwrite case.  */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin at *GSI.
   DEST, SRC, LEN, and SIZE are the arguments to the call.  FCODE is
   the BUILT_IN_* code of the builtin.  When the object-size check can
   be proven to pass (or SIZE is unknown, i.e. all ones), the _chk call
   is replaced by the unchecked variant.  Return true iff a
   simplification was made.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* __mempcpy_chk returns DEST + LEN.  */
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* TYPE 2: ask for the maximum value LEN can take.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* The check may fail at runtime; keep the _chk variant.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st[rp]cpy_chk builtin at *GSI.
   DEST, SRC, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin (BUILT_IN_STRCPY_CHK or
   BUILT_IN_STPCPY_CHK).  When the object-size check can be proven to
   pass, the call is replaced by the unchecked variant (or by
   __memcpy_chk when only a non-constant length is known).  Return true
   iff a simplification was made.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* TYPE 1: ask for the maximum string length of SRC.  */
  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* The string (plus its NUL) must fit in SIZE, i.e. MAXLEN < SIZE.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1903 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1904 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1905 length passed as third argument. IGNORE is true if return value can be
1906 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1908 static bool
1909 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1910 tree dest, tree src,
1911 tree len, tree size,
1912 enum built_in_function fcode)
1914 gimple stmt = gsi_stmt (*gsi);
1915 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1916 tree fn;
1918 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1920 /* If return value of __stpncpy_chk is ignored,
1921 optimize into __strncpy_chk. */
1922 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1923 if (fn)
1925 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1926 replace_call_with_call_and_fold (gsi, repl);
1927 return true;
1931 if (! tree_fits_uhwi_p (size))
1932 return false;
1934 tree maxlen = get_maxval_strlen (len, 2);
1935 if (! integer_all_onesp (size))
1937 if (! tree_fits_uhwi_p (len))
1939 /* If LEN is not constant, try MAXLEN too.
1940 For MAXLEN only allow optimizing into non-_ocs function
1941 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1942 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1943 return false;
1945 else
1946 maxlen = len;
1948 if (tree_int_cst_lt (size, maxlen))
1949 return false;
1952 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1953 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
1954 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
1955 if (!fn)
1956 return false;
1958 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1959 replace_call_with_call_and_fold (gsi, repl);
1960 return true;
/* Fold a call to __{,v}snprintf_chk at *GSI into {,v}snprintf when the
   object-size check can be proven to pass and the flag/format allow it.
   FCODE is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.
   The call arguments are (dest, len, flag, size, fmt, ...).  Return
   true iff a simplification was made.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      /* TYPE 2: ask for the maximum value LEN can take.  */
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* The check may fail at runtime; keep the _chk variant.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs: drop the flag and size arguments in place.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk at *GSI into {,v}sprintf when the
   output length can be bounded and the object-size check is known to
   pass (or SIZE is unknown).  FCODE is either BUILT_IN_SPRINTF_CHK or
   BUILT_IN_VSPRINTF_CHK.  The call arguments are
   (dest, flag, size, fmt, ...).  Return true iff a simplification was
   made.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* With a known SIZE the output (LEN) must provably fit.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs: drop the flag and size arguments in place.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2140 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2141 ORIG may be null if this is a 2-argument call. We don't attempt to
2142 simplify calls with more than 3 arguments.
2144 Return NULL_TREE if no simplification was possible, otherwise return the
2145 simplified form of the call as a tree. If IGNORED is true, it means that
2146 the caller does not use the returned value of the function. */
2148 static bool
2149 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2151 gimple stmt = gsi_stmt (*gsi);
2152 tree dest = gimple_call_arg (stmt, 0);
2153 tree fmt = gimple_call_arg (stmt, 1);
2154 tree orig = NULL_TREE;
2155 const char *fmt_str = NULL;
2157 /* Verify the required arguments in the original call. We deal with two
2158 types of sprintf() calls: 'sprintf (str, fmt)' and
2159 'sprintf (dest, "%s", orig)'. */
2160 if (gimple_call_num_args (stmt) > 3)
2161 return false;
2163 if (gimple_call_num_args (stmt) == 3)
2164 orig = gimple_call_arg (stmt, 2);
2166 /* Check whether the format is a literal string constant. */
2167 fmt_str = c_getstr (fmt);
2168 if (fmt_str == NULL)
2169 return false;
2171 if (!init_target_chars ())
2172 return false;
2174 /* If the format doesn't contain % args or %%, use strcpy. */
2175 if (strchr (fmt_str, target_percent) == NULL)
2177 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2179 if (!fn)
2180 return false;
2182 /* Don't optimize sprintf (buf, "abc", ptr++). */
2183 if (orig)
2184 return false;
2186 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2187 'format' is known to contain no % formats. */
2188 gimple_seq stmts = NULL;
2189 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2190 gimple_seq_add_stmt_without_update (&stmts, repl);
2191 if (gimple_call_lhs (stmt))
2193 repl = gimple_build_assign (gimple_call_lhs (stmt),
2194 build_int_cst (integer_type_node,
2195 strlen (fmt_str)));
2196 gimple_seq_add_stmt_without_update (&stmts, repl);
2197 gsi_replace_with_seq_vops (gsi, stmts);
2198 /* gsi now points at the assignment to the lhs, get a
2199 stmt iterator to the memcpy call.
2200 ??? We can't use gsi_for_stmt as that doesn't work when the
2201 CFG isn't built yet. */
2202 gimple_stmt_iterator gsi2 = *gsi;
2203 gsi_prev (&gsi2);
2204 fold_stmt (&gsi2);
2206 else
2208 gsi_replace_with_seq_vops (gsi, stmts);
2209 fold_stmt (gsi);
2211 return true;
2214 /* If the format is "%s", use strcpy if the result isn't used. */
2215 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2217 tree fn;
2218 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2220 if (!fn)
2221 return false;
2223 /* Don't crash on sprintf (str1, "%s"). */
2224 if (!orig)
2225 return false;
2227 tree orig_len = NULL_TREE;
2228 if (gimple_call_lhs (stmt))
2230 orig_len = get_maxval_strlen (orig, 0);
2231 if (!orig_len)
2232 return false;
2235 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2236 gimple_seq stmts = NULL;
2237 gimple repl = gimple_build_call (fn, 2, dest, orig);
2238 gimple_seq_add_stmt_without_update (&stmts, repl);
2239 if (gimple_call_lhs (stmt))
2241 if (!useless_type_conversion_p (integer_type_node,
2242 TREE_TYPE (orig_len)))
2243 orig_len = fold_convert (integer_type_node, orig_len);
2244 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2245 gimple_seq_add_stmt_without_update (&stmts, repl);
2246 gsi_replace_with_seq_vops (gsi, stmts);
2247 /* gsi now points at the assignment to the lhs, get a
2248 stmt iterator to the memcpy call.
2249 ??? We can't use gsi_for_stmt as that doesn't work when the
2250 CFG isn't built yet. */
2251 gimple_stmt_iterator gsi2 = *gsi;
2252 gsi_prev (&gsi2);
2253 fold_stmt (&gsi2);
2255 else
2257 gsi_replace_with_seq_vops (gsi, stmts);
2258 fold_stmt (gsi);
2260 return true;
2262 return false;
2265 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2266 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2267 attempt to simplify calls with more than 4 arguments.
2269 Return NULL_TREE if no simplification was possible, otherwise return the
2270 simplified form of the call as a tree. If IGNORED is true, it means that
2271 the caller does not use the returned value of the function. */
2273 static bool
2274 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2276 gimple stmt = gsi_stmt (*gsi);
2277 tree dest = gimple_call_arg (stmt, 0);
2278 tree destsize = gimple_call_arg (stmt, 1);
2279 tree fmt = gimple_call_arg (stmt, 2);
2280 tree orig = NULL_TREE;
2281 const char *fmt_str = NULL;
2283 if (gimple_call_num_args (stmt) > 4)
2284 return false;
2286 if (gimple_call_num_args (stmt) == 4)
2287 orig = gimple_call_arg (stmt, 3);
2289 if (!tree_fits_uhwi_p (destsize))
2290 return false;
2291 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2293 /* Check whether the format is a literal string constant. */
2294 fmt_str = c_getstr (fmt);
2295 if (fmt_str == NULL)
2296 return false;
2298 if (!init_target_chars ())
2299 return false;
2301 /* If the format doesn't contain % args or %%, use strcpy. */
2302 if (strchr (fmt_str, target_percent) == NULL)
2304 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2305 if (!fn)
2306 return false;
2308 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2309 if (orig)
2310 return false;
2312 /* We could expand this as
2313 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2314 or to
2315 memcpy (str, fmt_with_nul_at_cstm1, cst);
2316 but in the former case that might increase code size
2317 and in the latter case grow .rodata section too much.
2318 So punt for now. */
2319 size_t len = strlen (fmt_str);
2320 if (len >= destlen)
2321 return false;
2323 gimple_seq stmts = NULL;
2324 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2325 gimple_seq_add_stmt_without_update (&stmts, repl);
2326 if (gimple_call_lhs (stmt))
2328 repl = gimple_build_assign (gimple_call_lhs (stmt),
2329 build_int_cst (integer_type_node, len));
2330 gimple_seq_add_stmt_without_update (&stmts, repl);
2331 gsi_replace_with_seq_vops (gsi, stmts);
2332 /* gsi now points at the assignment to the lhs, get a
2333 stmt iterator to the memcpy call.
2334 ??? We can't use gsi_for_stmt as that doesn't work when the
2335 CFG isn't built yet. */
2336 gimple_stmt_iterator gsi2 = *gsi;
2337 gsi_prev (&gsi2);
2338 fold_stmt (&gsi2);
2340 else
2342 gsi_replace_with_seq_vops (gsi, stmts);
2343 fold_stmt (gsi);
2345 return true;
2348 /* If the format is "%s", use strcpy if the result isn't used. */
2349 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2351 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2352 if (!fn)
2353 return false;
2355 /* Don't crash on snprintf (str1, cst, "%s"). */
2356 if (!orig)
2357 return false;
2359 tree orig_len = get_maxval_strlen (orig, 0);
2360 if (!orig_len)
2361 return false;
2363 /* We could expand this as
2364 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2365 or to
2366 memcpy (str1, str2_with_nul_at_cstm1, cst);
2367 but in the former case that might increase code size
2368 and in the latter case grow .rodata section too much.
2369 So punt for now. */
2370 if (compare_tree_int (orig_len, destlen) >= 0)
2371 return false;
2373 /* Convert snprintf (str1, cst, "%s", str2) into
2374 strcpy (str1, str2) if strlen (str2) < cst. */
2375 gimple_seq stmts = NULL;
2376 gimple repl = gimple_build_call (fn, 2, dest, orig);
2377 gimple_seq_add_stmt_without_update (&stmts, repl);
2378 if (gimple_call_lhs (stmt))
2380 if (!useless_type_conversion_p (integer_type_node,
2381 TREE_TYPE (orig_len)))
2382 orig_len = fold_convert (integer_type_node, orig_len);
2383 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2384 gimple_seq_add_stmt_without_update (&stmts, repl);
2385 gsi_replace_with_seq_vops (gsi, stmts);
2386 /* gsi now points at the assignment to the lhs, get a
2387 stmt iterator to the memcpy call.
2388 ??? We can't use gsi_for_stmt as that doesn't work when the
2389 CFG isn't built yet. */
2390 gimple_stmt_iterator gsi2 = *gsi;
2391 gsi_prev (&gsi2);
2392 fold_stmt (&gsi2);
2394 else
2396 gsi_replace_with_seq_vops (gsi, stmts);
2397 fold_stmt (gsi);
2399 return true;
2401 return false;
2405 /* Fold a call to __builtin_strlen with known length LEN. */
2407 static bool
2408 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2410 gimple stmt = gsi_stmt (*gsi);
2411 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2412 if (!len)
2413 return false;
2414 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2415 replace_call_with_value (gsi, len);
2416 return true;
2420 /* Fold the non-target builtin at *GSI and return whether any simplification
2421 was made. */
2423 static bool
2424 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2426 gimple stmt = gsi_stmt (*gsi);
2427 tree callee = gimple_call_fndecl (stmt);
2429 /* Give up for always_inline inline builtins until they are
2430 inlined. */
2431 if (avoid_folding_inline_builtin (callee))
2432 return false;
2434 switch (DECL_FUNCTION_CODE (callee))
2436 case BUILT_IN_BZERO:
2437 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2438 gimple_call_arg (stmt, 1));
2439 case BUILT_IN_MEMSET:
2440 return gimple_fold_builtin_memset (gsi,
2441 gimple_call_arg (stmt, 1),
2442 gimple_call_arg (stmt, 2));
2443 case BUILT_IN_BCOPY:
2444 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2445 gimple_call_arg (stmt, 0), 3);
2446 case BUILT_IN_MEMCPY:
2447 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2448 gimple_call_arg (stmt, 1), 0);
2449 case BUILT_IN_MEMPCPY:
2450 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2451 gimple_call_arg (stmt, 1), 1);
2452 case BUILT_IN_MEMMOVE:
2453 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2454 gimple_call_arg (stmt, 1), 3);
2455 case BUILT_IN_SPRINTF_CHK:
2456 case BUILT_IN_VSPRINTF_CHK:
2457 return gimple_fold_builtin_sprintf_chk (gsi, DECL_FUNCTION_CODE (callee));
2458 case BUILT_IN_STRCAT_CHK:
2459 return gimple_fold_builtin_strcat_chk (gsi);
2460 case BUILT_IN_STRLEN:
2461 return gimple_fold_builtin_strlen (gsi);
2462 case BUILT_IN_STRCPY:
2463 return gimple_fold_builtin_strcpy (gsi,
2464 gimple_call_arg (stmt, 0),
2465 gimple_call_arg (stmt, 1));
2466 case BUILT_IN_STRNCPY:
2467 return gimple_fold_builtin_strncpy (gsi,
2468 gimple_call_arg (stmt, 0),
2469 gimple_call_arg (stmt, 1),
2470 gimple_call_arg (stmt, 2));
2471 case BUILT_IN_STRCAT:
2472 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2473 gimple_call_arg (stmt, 1));
2474 case BUILT_IN_FPUTS:
2475 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2476 gimple_call_arg (stmt, 1), false);
2477 case BUILT_IN_FPUTS_UNLOCKED:
2478 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2479 gimple_call_arg (stmt, 1), true);
2480 case BUILT_IN_MEMCPY_CHK:
2481 case BUILT_IN_MEMPCPY_CHK:
2482 case BUILT_IN_MEMMOVE_CHK:
2483 case BUILT_IN_MEMSET_CHK:
2484 return gimple_fold_builtin_memory_chk (gsi,
2485 gimple_call_arg (stmt, 0),
2486 gimple_call_arg (stmt, 1),
2487 gimple_call_arg (stmt, 2),
2488 gimple_call_arg (stmt, 3),
2489 DECL_FUNCTION_CODE (callee));
2490 case BUILT_IN_STRCPY_CHK:
2491 case BUILT_IN_STPCPY_CHK:
2492 return gimple_fold_builtin_stxcpy_chk (gsi,
2493 gimple_call_arg (stmt, 0),
2494 gimple_call_arg (stmt, 1),
2495 gimple_call_arg (stmt, 2),
2496 DECL_FUNCTION_CODE (callee));
2497 case BUILT_IN_STRNCPY_CHK:
2498 case BUILT_IN_STPNCPY_CHK:
2499 return gimple_fold_builtin_stxncpy_chk (gsi,
2500 gimple_call_arg (stmt, 0),
2501 gimple_call_arg (stmt, 1),
2502 gimple_call_arg (stmt, 2),
2503 gimple_call_arg (stmt, 3),
2504 DECL_FUNCTION_CODE (callee));
2505 case BUILT_IN_SNPRINTF_CHK:
2506 case BUILT_IN_VSNPRINTF_CHK:
2507 return gimple_fold_builtin_snprintf_chk (gsi,
2508 DECL_FUNCTION_CODE (callee));
2509 case BUILT_IN_SNPRINTF:
2510 return gimple_fold_builtin_snprintf (gsi);
2511 case BUILT_IN_SPRINTF:
2512 return gimple_fold_builtin_sprintf (gsi);
2513 default:;
2516 /* Try the generic builtin folder. */
2517 bool ignore = (gimple_call_lhs (stmt) == NULL);
2518 tree result = fold_call_stmt (stmt, ignore);
2519 if (result)
2521 if (ignore)
2522 STRIP_NOPS (result);
2523 else
2524 result = fold_convert (gimple_call_return_type (stmt), result);
2525 if (!update_call_from_tree (gsi, result))
2526 gimplify_and_update_call_from_tree (gsi, result);
2527 return true;
2530 return false;
2533 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2534 The statement may be replaced by another statement, e.g., if the call
2535 simplifies to a constant value. Return true if any changes were made.
2536 It is assumed that the operands have been previously folded. */
2538 static bool
2539 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2541 gimple stmt = gsi_stmt (*gsi);
2542 tree callee;
2543 bool changed = false;
2544 unsigned i;
2546 /* Fold *& in call arguments. */
2547 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2548 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2550 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2551 if (tmp)
2553 gimple_call_set_arg (stmt, i, tmp);
2554 changed = true;
2558 /* Check for virtual calls that became direct calls. */
2559 callee = gimple_call_fn (stmt);
2560 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
2562 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
2564 if (dump_file && virtual_method_call_p (callee)
2565 && !possible_polymorphic_call_target_p
2566 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
2567 (OBJ_TYPE_REF_EXPR (callee)))))
2569 fprintf (dump_file,
2570 "Type inheritance inconsistent devirtualization of ");
2571 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2572 fprintf (dump_file, " to ");
2573 print_generic_expr (dump_file, callee, TDF_SLIM);
2574 fprintf (dump_file, "\n");
2577 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
2578 changed = true;
2580 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
2582 bool final;
2583 vec <cgraph_node *>targets
2584 = possible_polymorphic_call_targets (callee, stmt, &final);
2585 if (final && targets.length () <= 1 && dbg_cnt (devirt))
2587 tree lhs = gimple_call_lhs (stmt);
2588 if (dump_enabled_p ())
2590 location_t loc = gimple_location_safe (stmt);
2591 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2592 "folding virtual function call to %s\n",
2593 targets.length () == 1
2594 ? targets[0]->name ()
2595 : "__builtin_unreachable");
2597 if (targets.length () == 1)
2599 gimple_call_set_fndecl (stmt, targets[0]->decl);
2600 changed = true;
2601 /* If the call becomes noreturn, remove the lhs. */
2602 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
2604 if (TREE_CODE (lhs) == SSA_NAME)
2606 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
2607 tree def = get_or_create_ssa_default_def (cfun, var);
2608 gimple new_stmt = gimple_build_assign (lhs, def);
2609 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
2611 gimple_call_set_lhs (stmt, NULL_TREE);
2614 else
2616 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2617 gimple new_stmt = gimple_build_call (fndecl, 0);
2618 gimple_set_location (new_stmt, gimple_location (stmt));
2619 if (lhs && TREE_CODE (lhs) == SSA_NAME)
2621 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
2622 tree def = get_or_create_ssa_default_def (cfun, var);
2624 /* To satisfy condition for
2625 cgraph_update_edges_for_call_stmt_node,
2626 we need to preserve GIMPLE_CALL statement
2627 at position of GSI iterator. */
2628 update_call_from_tree (gsi, def);
2629 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
2631 else
2632 gsi_replace (gsi, new_stmt, true);
2633 return true;
2639 if (inplace)
2640 return changed;
2642 /* Check for builtins that CCP can handle using information not
2643 available in the generic fold routines. */
2644 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2646 if (gimple_fold_builtin (gsi))
2647 changed = true;
2649 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
2651 changed |= targetm.gimple_fold_builtin (gsi);
2653 else if (gimple_call_internal_p (stmt))
2655 enum tree_code subcode = ERROR_MARK;
2656 tree result = NULL_TREE;
2657 switch (gimple_call_internal_fn (stmt))
2659 case IFN_BUILTIN_EXPECT:
2660 result = fold_builtin_expect (gimple_location (stmt),
2661 gimple_call_arg (stmt, 0),
2662 gimple_call_arg (stmt, 1),
2663 gimple_call_arg (stmt, 2));
2664 break;
2665 case IFN_UBSAN_CHECK_ADD:
2666 subcode = PLUS_EXPR;
2667 break;
2668 case IFN_UBSAN_CHECK_SUB:
2669 subcode = MINUS_EXPR;
2670 break;
2671 case IFN_UBSAN_CHECK_MUL:
2672 subcode = MULT_EXPR;
2673 break;
2674 default:
2675 break;
2677 if (subcode != ERROR_MARK)
2679 tree arg0 = gimple_call_arg (stmt, 0);
2680 tree arg1 = gimple_call_arg (stmt, 1);
2681 /* x = y + 0; x = y - 0; x = y * 0; */
2682 if (integer_zerop (arg1))
2683 result = subcode == MULT_EXPR
2684 ? build_zero_cst (TREE_TYPE (arg0))
2685 : arg0;
2686 /* x = 0 + y; x = 0 * y; */
2687 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
2688 result = subcode == MULT_EXPR
2689 ? build_zero_cst (TREE_TYPE (arg0))
2690 : arg1;
2691 /* x = y - y; */
2692 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
2693 result = build_zero_cst (TREE_TYPE (arg0));
2694 /* x = y * 1; x = 1 * y; */
2695 else if (subcode == MULT_EXPR)
2697 if (integer_onep (arg1))
2698 result = arg0;
2699 else if (integer_onep (arg0))
2700 result = arg1;
2703 if (result)
2705 if (!update_call_from_tree (gsi, result))
2706 gimplify_and_update_call_from_tree (gsi, result);
2707 changed = true;
2711 return changed;
2714 /* Canonicalize MEM_REFs invariant address operand after propagation. */
2716 static bool
2717 maybe_canonicalize_mem_ref_addr (tree *t)
2719 bool res = false;
2721 if (TREE_CODE (*t) == ADDR_EXPR)
2722 t = &TREE_OPERAND (*t, 0);
2724 while (handled_component_p (*t))
2725 t = &TREE_OPERAND (*t, 0);
2727 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
2728 of invariant addresses into a SSA name MEM_REF address. */
2729 if (TREE_CODE (*t) == MEM_REF
2730 || TREE_CODE (*t) == TARGET_MEM_REF)
2732 tree addr = TREE_OPERAND (*t, 0);
2733 if (TREE_CODE (addr) == ADDR_EXPR
2734 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
2735 || handled_component_p (TREE_OPERAND (addr, 0))))
2737 tree base;
2738 HOST_WIDE_INT coffset;
2739 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
2740 &coffset);
2741 if (!base)
2742 gcc_unreachable ();
2744 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
2745 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
2746 TREE_OPERAND (*t, 1),
2747 size_int (coffset));
2748 res = true;
2750 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
2751 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
2754 /* Canonicalize back MEM_REFs to plain reference trees if the object
2755 accessed is a decl that has the same access semantics as the MEM_REF. */
2756 if (TREE_CODE (*t) == MEM_REF
2757 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
2758 && integer_zerop (TREE_OPERAND (*t, 1)))
2760 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
2761 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
2762 if (/* Same volatile qualification. */
2763 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
2764 /* Same TBAA behavior with -fstrict-aliasing. */
2765 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
2766 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
2767 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
2768 /* Same alignment. */
2769 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
2770 /* We have to look out here to not drop a required conversion
2771 from the rhs to the lhs if *t appears on the lhs or vice-versa
2772 if it appears on the rhs. Thus require strict type
2773 compatibility. */
2774 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
2776 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
2777 res = true;
2781 /* Canonicalize TARGET_MEM_REF in particular with respect to
2782 the indexes becoming constant. */
2783 else if (TREE_CODE (*t) == TARGET_MEM_REF)
2785 tree tem = maybe_fold_tmr (*t);
2786 if (tem)
2788 *t = tem;
2789 res = true;
2793 return res;
2796 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
2797 distinguishes both cases. */
2799 static bool
2800 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
2802 bool changed = false;
2803 gimple stmt = gsi_stmt (*gsi);
2804 unsigned i;
2806 /* First do required canonicalization of [TARGET_]MEM_REF addresses
2807 after propagation.
2808 ??? This shouldn't be done in generic folding but in the
2809 propagation helpers which also know whether an address was
2810 propagated. */
2811 switch (gimple_code (stmt))
2813 case GIMPLE_ASSIGN:
2814 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
2816 tree *rhs = gimple_assign_rhs1_ptr (stmt);
2817 if ((REFERENCE_CLASS_P (*rhs)
2818 || TREE_CODE (*rhs) == ADDR_EXPR)
2819 && maybe_canonicalize_mem_ref_addr (rhs))
2820 changed = true;
2821 tree *lhs = gimple_assign_lhs_ptr (stmt);
2822 if (REFERENCE_CLASS_P (*lhs)
2823 && maybe_canonicalize_mem_ref_addr (lhs))
2824 changed = true;
2826 break;
2827 case GIMPLE_CALL:
2829 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2831 tree *arg = gimple_call_arg_ptr (stmt, i);
2832 if (REFERENCE_CLASS_P (*arg)
2833 && maybe_canonicalize_mem_ref_addr (arg))
2834 changed = true;
2836 tree *lhs = gimple_call_lhs_ptr (stmt);
2837 if (*lhs
2838 && REFERENCE_CLASS_P (*lhs)
2839 && maybe_canonicalize_mem_ref_addr (lhs))
2840 changed = true;
2841 break;
2843 case GIMPLE_ASM:
2845 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
2847 tree link = gimple_asm_output_op (stmt, i);
2848 tree op = TREE_VALUE (link);
2849 if (REFERENCE_CLASS_P (op)
2850 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
2851 changed = true;
2853 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
2855 tree link = gimple_asm_input_op (stmt, i);
2856 tree op = TREE_VALUE (link);
2857 if ((REFERENCE_CLASS_P (op)
2858 || TREE_CODE (op) == ADDR_EXPR)
2859 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
2860 changed = true;
2863 break;
2864 case GIMPLE_DEBUG:
2865 if (gimple_debug_bind_p (stmt))
2867 tree *val = gimple_debug_bind_get_value_ptr (stmt);
2868 if (*val
2869 && (REFERENCE_CLASS_P (*val)
2870 || TREE_CODE (*val) == ADDR_EXPR)
2871 && maybe_canonicalize_mem_ref_addr (val))
2872 changed = true;
2874 break;
2875 default:;
2878 /* Fold the main computation performed by the statement. */
2879 switch (gimple_code (stmt))
2881 case GIMPLE_ASSIGN:
2883 unsigned old_num_ops = gimple_num_ops (stmt);
2884 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2885 tree lhs = gimple_assign_lhs (stmt);
2886 tree new_rhs;
2887 /* First canonicalize operand order. This avoids building new
2888 trees if this is the only thing fold would later do. */
2889 if ((commutative_tree_code (subcode)
2890 || commutative_ternary_tree_code (subcode))
2891 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
2892 gimple_assign_rhs2 (stmt), false))
2894 tree tem = gimple_assign_rhs1 (stmt);
2895 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
2896 gimple_assign_set_rhs2 (stmt, tem);
2897 changed = true;
2899 new_rhs = fold_gimple_assign (gsi);
2900 if (new_rhs
2901 && !useless_type_conversion_p (TREE_TYPE (lhs),
2902 TREE_TYPE (new_rhs)))
2903 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
2904 if (new_rhs
2905 && (!inplace
2906 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
2908 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
2909 changed = true;
2911 break;
2914 case GIMPLE_COND:
2915 changed |= fold_gimple_cond (stmt);
2916 break;
2918 case GIMPLE_CALL:
2919 changed |= gimple_fold_call (gsi, inplace);
2920 break;
2922 case GIMPLE_ASM:
2923 /* Fold *& in asm operands. */
2925 size_t noutputs;
2926 const char **oconstraints;
2927 const char *constraint;
2928 bool allows_mem, allows_reg;
2930 noutputs = gimple_asm_noutputs (stmt);
2931 oconstraints = XALLOCAVEC (const char *, noutputs);
2933 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
2935 tree link = gimple_asm_output_op (stmt, i);
2936 tree op = TREE_VALUE (link);
2937 oconstraints[i]
2938 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
2939 if (REFERENCE_CLASS_P (op)
2940 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
2942 TREE_VALUE (link) = op;
2943 changed = true;
2946 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
2948 tree link = gimple_asm_input_op (stmt, i);
2949 tree op = TREE_VALUE (link);
2950 constraint
2951 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
2952 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
2953 oconstraints, &allows_mem, &allows_reg);
2954 if (REFERENCE_CLASS_P (op)
2955 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
2956 != NULL_TREE)
2958 TREE_VALUE (link) = op;
2959 changed = true;
2963 break;
2965 case GIMPLE_DEBUG:
2966 if (gimple_debug_bind_p (stmt))
2968 tree val = gimple_debug_bind_get_value (stmt);
2969 if (val
2970 && REFERENCE_CLASS_P (val))
2972 tree tem = maybe_fold_reference (val, false);
2973 if (tem)
2975 gimple_debug_bind_set_value (stmt, tem);
2976 changed = true;
2979 else if (val
2980 && TREE_CODE (val) == ADDR_EXPR)
2982 tree ref = TREE_OPERAND (val, 0);
2983 tree tem = maybe_fold_reference (ref, false);
2984 if (tem)
2986 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
2987 gimple_debug_bind_set_value (stmt, tem);
2988 changed = true;
2992 break;
2994 default:;
2997 stmt = gsi_stmt (*gsi);
2999 /* Fold *& on the lhs. */
3000 if (gimple_has_lhs (stmt))
3002 tree lhs = gimple_get_lhs (stmt);
3003 if (lhs && REFERENCE_CLASS_P (lhs))
3005 tree new_lhs = maybe_fold_reference (lhs, true);
3006 if (new_lhs)
3008 gimple_set_lhs (stmt, new_lhs);
3009 changed = true;
3014 return changed;
3017 /* Fold the statement pointed to by GSI. In some cases, this function may
3018 replace the whole statement with a new one. Returns true iff folding
3019 makes any changes.
3020 The statement pointed to by GSI should be in valid gimple form but may
3021 be in unfolded state as resulting from for example constant propagation
3022 which can produce *&x = 0. */
3024 bool
3025 fold_stmt (gimple_stmt_iterator *gsi)
3027 return fold_stmt_1 (gsi, false);
3030 /* Perform the minimal folding on statement *GSI. Only operations like
3031 *&x created by constant propagation are handled. The statement cannot
3032 be replaced with a new one. Return true if the statement was
3033 changed, false otherwise.
3034 The statement *GSI should be in valid gimple form but may
3035 be in unfolded state as resulting from for example constant propagation
3036 which can produce *&x = 0. */
3038 bool
3039 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3041 gimple stmt = gsi_stmt (*gsi);
3042 bool changed = fold_stmt_1 (gsi, true);
3043 gcc_assert (gsi_stmt (*gsi) == stmt);
3044 return changed;
3047 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3048 if EXPR is null or we don't know how.
3049 If non-null, the result always has boolean type. */
3051 static tree
3052 canonicalize_bool (tree expr, bool invert)
3054 if (!expr)
3055 return NULL_TREE;
3056 else if (invert)
3058 if (integer_nonzerop (expr))
3059 return boolean_false_node;
3060 else if (integer_zerop (expr))
3061 return boolean_true_node;
3062 else if (TREE_CODE (expr) == SSA_NAME)
3063 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3064 build_int_cst (TREE_TYPE (expr), 0));
3065 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3066 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3067 boolean_type_node,
3068 TREE_OPERAND (expr, 0),
3069 TREE_OPERAND (expr, 1));
3070 else
3071 return NULL_TREE;
3073 else
3075 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3076 return expr;
3077 if (integer_nonzerop (expr))
3078 return boolean_true_node;
3079 else if (integer_zerop (expr))
3080 return boolean_false_node;
3081 else if (TREE_CODE (expr) == SSA_NAME)
3082 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3083 build_int_cst (TREE_TYPE (expr), 0));
3084 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3085 return fold_build2 (TREE_CODE (expr),
3086 boolean_type_node,
3087 TREE_OPERAND (expr, 0),
3088 TREE_OPERAND (expr, 1));
3089 else
3090 return NULL_TREE;
3094 /* Check to see if a boolean expression EXPR is logically equivalent to the
3095 comparison (OP1 CODE OP2). Check for various identities involving
3096 SSA_NAMEs. */
3098 static bool
3099 same_bool_comparison_p (const_tree expr, enum tree_code code,
3100 const_tree op1, const_tree op2)
3102 gimple s;
3104 /* The obvious case. */
3105 if (TREE_CODE (expr) == code
3106 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3107 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3108 return true;
3110 /* Check for comparing (name, name != 0) and the case where expr
3111 is an SSA_NAME with a definition matching the comparison. */
3112 if (TREE_CODE (expr) == SSA_NAME
3113 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3115 if (operand_equal_p (expr, op1, 0))
3116 return ((code == NE_EXPR && integer_zerop (op2))
3117 || (code == EQ_EXPR && integer_nonzerop (op2)));
3118 s = SSA_NAME_DEF_STMT (expr);
3119 if (is_gimple_assign (s)
3120 && gimple_assign_rhs_code (s) == code
3121 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3122 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3123 return true;
3126 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3127 of name is a comparison, recurse. */
3128 if (TREE_CODE (op1) == SSA_NAME
3129 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3131 s = SSA_NAME_DEF_STMT (op1);
3132 if (is_gimple_assign (s)
3133 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3135 enum tree_code c = gimple_assign_rhs_code (s);
3136 if ((c == NE_EXPR && integer_zerop (op2))
3137 || (c == EQ_EXPR && integer_nonzerop (op2)))
3138 return same_bool_comparison_p (expr, c,
3139 gimple_assign_rhs1 (s),
3140 gimple_assign_rhs2 (s));
3141 if ((c == EQ_EXPR && integer_zerop (op2))
3142 || (c == NE_EXPR && integer_nonzerop (op2)))
3143 return same_bool_comparison_p (expr,
3144 invert_tree_comparison (c, false),
3145 gimple_assign_rhs1 (s),
3146 gimple_assign_rhs2 (s));
3149 return false;
3152 /* Check to see if two boolean expressions OP1 and OP2 are logically
3153 equivalent. */
3155 static bool
3156 same_bool_result_p (const_tree op1, const_tree op2)
3158 /* Simple cases first. */
3159 if (operand_equal_p (op1, op2, 0))
3160 return true;
3162 /* Check the cases where at least one of the operands is a comparison.
3163 These are a bit smarter than operand_equal_p in that they apply some
3164 identifies on SSA_NAMEs. */
3165 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
3166 && same_bool_comparison_p (op1, TREE_CODE (op2),
3167 TREE_OPERAND (op2, 0),
3168 TREE_OPERAND (op2, 1)))
3169 return true;
3170 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
3171 && same_bool_comparison_p (op2, TREE_CODE (op1),
3172 TREE_OPERAND (op1, 0),
3173 TREE_OPERAND (op1, 1)))
3174 return true;
3176 /* Default case. */
3177 return false;
3180 /* Forward declarations for some mutually recursive functions. */
3182 static tree
3183 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3184 enum tree_code code2, tree op2a, tree op2b);
3185 static tree
3186 and_var_with_comparison (tree var, bool invert,
3187 enum tree_code code2, tree op2a, tree op2b);
3188 static tree
3189 and_var_with_comparison_1 (gimple stmt,
3190 enum tree_code code2, tree op2a, tree op2b);
3191 static tree
3192 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3193 enum tree_code code2, tree op2a, tree op2b);
3194 static tree
3195 or_var_with_comparison (tree var, bool invert,
3196 enum tree_code code2, tree op2a, tree op2b);
3197 static tree
3198 or_var_with_comparison_1 (gimple stmt,
3199 enum tree_code code2, tree op2a, tree op2b);
3201 /* Helper function for and_comparisons_1: try to simplify the AND of the
3202 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3203 If INVERT is true, invert the value of the VAR before doing the AND.
3204 Return NULL_EXPR if we can't simplify this to a single expression. */
3206 static tree
3207 and_var_with_comparison (tree var, bool invert,
3208 enum tree_code code2, tree op2a, tree op2b)
3210 tree t;
3211 gimple stmt = SSA_NAME_DEF_STMT (var);
3213 /* We can only deal with variables whose definitions are assignments. */
3214 if (!is_gimple_assign (stmt))
3215 return NULL_TREE;
3217 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3218 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3219 Then we only have to consider the simpler non-inverted cases. */
3220 if (invert)
3221 t = or_var_with_comparison_1 (stmt,
3222 invert_tree_comparison (code2, false),
3223 op2a, op2b);
3224 else
3225 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3226 return canonicalize_bool (t, invert);
3229 /* Try to simplify the AND of the ssa variable defined by the assignment
3230 STMT with the comparison specified by (OP2A CODE2 OP2B).
3231 Return NULL_EXPR if we can't simplify this to a single expression. */
3233 static tree
3234 and_var_with_comparison_1 (gimple stmt,
3235 enum tree_code code2, tree op2a, tree op2b)
3237 tree var = gimple_assign_lhs (stmt);
3238 tree true_test_var = NULL_TREE;
3239 tree false_test_var = NULL_TREE;
3240 enum tree_code innercode = gimple_assign_rhs_code (stmt);
3242 /* Check for identities like (var AND (var == 0)) => false. */
3243 if (TREE_CODE (op2a) == SSA_NAME
3244 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
3246 if ((code2 == NE_EXPR && integer_zerop (op2b))
3247 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
3249 true_test_var = op2a;
3250 if (var == true_test_var)
3251 return var;
3253 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
3254 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
3256 false_test_var = op2a;
3257 if (var == false_test_var)
3258 return boolean_false_node;
3262 /* If the definition is a comparison, recurse on it. */
3263 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
3265 tree t = and_comparisons_1 (innercode,
3266 gimple_assign_rhs1 (stmt),
3267 gimple_assign_rhs2 (stmt),
3268 code2,
3269 op2a,
3270 op2b);
3271 if (t)
3272 return t;
3275 /* If the definition is an AND or OR expression, we may be able to
3276 simplify by reassociating. */
3277 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
3278 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
3280 tree inner1 = gimple_assign_rhs1 (stmt);
3281 tree inner2 = gimple_assign_rhs2 (stmt);
3282 gimple s;
3283 tree t;
3284 tree partial = NULL_TREE;
3285 bool is_and = (innercode == BIT_AND_EXPR);
3287 /* Check for boolean identities that don't require recursive examination
3288 of inner1/inner2:
3289 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
3290 inner1 AND (inner1 OR inner2) => inner1
3291 !inner1 AND (inner1 AND inner2) => false
3292 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
3293 Likewise for similar cases involving inner2. */
3294 if (inner1 == true_test_var)
3295 return (is_and ? var : inner1);
3296 else if (inner2 == true_test_var)
3297 return (is_and ? var : inner2);
3298 else if (inner1 == false_test_var)
3299 return (is_and
3300 ? boolean_false_node
3301 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
3302 else if (inner2 == false_test_var)
3303 return (is_and
3304 ? boolean_false_node
3305 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
3307 /* Next, redistribute/reassociate the AND across the inner tests.
3308 Compute the first partial result, (inner1 AND (op2a code op2b)) */
3309 if (TREE_CODE (inner1) == SSA_NAME
3310 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
3311 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3312 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
3313 gimple_assign_rhs1 (s),
3314 gimple_assign_rhs2 (s),
3315 code2, op2a, op2b)))
3317 /* Handle the AND case, where we are reassociating:
3318 (inner1 AND inner2) AND (op2a code2 op2b)
3319 => (t AND inner2)
3320 If the partial result t is a constant, we win. Otherwise
3321 continue on to try reassociating with the other inner test. */
3322 if (is_and)
3324 if (integer_onep (t))
3325 return inner2;
3326 else if (integer_zerop (t))
3327 return boolean_false_node;
3330 /* Handle the OR case, where we are redistributing:
3331 (inner1 OR inner2) AND (op2a code2 op2b)
3332 => (t OR (inner2 AND (op2a code2 op2b))) */
3333 else if (integer_onep (t))
3334 return boolean_true_node;
3336 /* Save partial result for later. */
3337 partial = t;
3340 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
3341 if (TREE_CODE (inner2) == SSA_NAME
3342 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
3343 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3344 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
3345 gimple_assign_rhs1 (s),
3346 gimple_assign_rhs2 (s),
3347 code2, op2a, op2b)))
3349 /* Handle the AND case, where we are reassociating:
3350 (inner1 AND inner2) AND (op2a code2 op2b)
3351 => (inner1 AND t) */
3352 if (is_and)
3354 if (integer_onep (t))
3355 return inner1;
3356 else if (integer_zerop (t))
3357 return boolean_false_node;
3358 /* If both are the same, we can apply the identity
3359 (x AND x) == x. */
3360 else if (partial && same_bool_result_p (t, partial))
3361 return t;
3364 /* Handle the OR case. where we are redistributing:
3365 (inner1 OR inner2) AND (op2a code2 op2b)
3366 => (t OR (inner1 AND (op2a code2 op2b)))
3367 => (t OR partial) */
3368 else
3370 if (integer_onep (t))
3371 return boolean_true_node;
3372 else if (partial)
3374 /* We already got a simplification for the other
3375 operand to the redistributed OR expression. The
3376 interesting case is when at least one is false.
3377 Or, if both are the same, we can apply the identity
3378 (x OR x) == x. */
3379 if (integer_zerop (partial))
3380 return t;
3381 else if (integer_zerop (t))
3382 return partial;
3383 else if (same_bool_result_p (t, partial))
3384 return t;
3389 return NULL_TREE;
3392 /* Try to simplify the AND of two comparisons defined by
3393 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
3394 If this can be done without constructing an intermediate value,
3395 return the resulting tree; otherwise NULL_TREE is returned.
3396 This function is deliberately asymmetric as it recurses on SSA_DEFs
3397 in the first comparison but not the second. */
3399 static tree
3400 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3401 enum tree_code code2, tree op2a, tree op2b)
3403 tree truth_type = truth_type_for (TREE_TYPE (op1a));
3405 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
3406 if (operand_equal_p (op1a, op2a, 0)
3407 && operand_equal_p (op1b, op2b, 0))
3409 /* Result will be either NULL_TREE, or a combined comparison. */
3410 tree t = combine_comparisons (UNKNOWN_LOCATION,
3411 TRUTH_ANDIF_EXPR, code1, code2,
3412 truth_type, op1a, op1b);
3413 if (t)
3414 return t;
3417 /* Likewise the swapped case of the above. */
3418 if (operand_equal_p (op1a, op2b, 0)
3419 && operand_equal_p (op1b, op2a, 0))
3421 /* Result will be either NULL_TREE, or a combined comparison. */
3422 tree t = combine_comparisons (UNKNOWN_LOCATION,
3423 TRUTH_ANDIF_EXPR, code1,
3424 swap_tree_comparison (code2),
3425 truth_type, op1a, op1b);
3426 if (t)
3427 return t;
3430 /* If both comparisons are of the same value against constants, we might
3431 be able to merge them. */
3432 if (operand_equal_p (op1a, op2a, 0)
3433 && TREE_CODE (op1b) == INTEGER_CST
3434 && TREE_CODE (op2b) == INTEGER_CST)
3436 int cmp = tree_int_cst_compare (op1b, op2b);
3438 /* If we have (op1a == op1b), we should either be able to
3439 return that or FALSE, depending on whether the constant op1b
3440 also satisfies the other comparison against op2b. */
3441 if (code1 == EQ_EXPR)
3443 bool done = true;
3444 bool val;
3445 switch (code2)
3447 case EQ_EXPR: val = (cmp == 0); break;
3448 case NE_EXPR: val = (cmp != 0); break;
3449 case LT_EXPR: val = (cmp < 0); break;
3450 case GT_EXPR: val = (cmp > 0); break;
3451 case LE_EXPR: val = (cmp <= 0); break;
3452 case GE_EXPR: val = (cmp >= 0); break;
3453 default: done = false;
3455 if (done)
3457 if (val)
3458 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3459 else
3460 return boolean_false_node;
3463 /* Likewise if the second comparison is an == comparison. */
3464 else if (code2 == EQ_EXPR)
3466 bool done = true;
3467 bool val;
3468 switch (code1)
3470 case EQ_EXPR: val = (cmp == 0); break;
3471 case NE_EXPR: val = (cmp != 0); break;
3472 case LT_EXPR: val = (cmp > 0); break;
3473 case GT_EXPR: val = (cmp < 0); break;
3474 case LE_EXPR: val = (cmp >= 0); break;
3475 case GE_EXPR: val = (cmp <= 0); break;
3476 default: done = false;
3478 if (done)
3480 if (val)
3481 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3482 else
3483 return boolean_false_node;
3487 /* Same business with inequality tests. */
3488 else if (code1 == NE_EXPR)
3490 bool val;
3491 switch (code2)
3493 case EQ_EXPR: val = (cmp != 0); break;
3494 case NE_EXPR: val = (cmp == 0); break;
3495 case LT_EXPR: val = (cmp >= 0); break;
3496 case GT_EXPR: val = (cmp <= 0); break;
3497 case LE_EXPR: val = (cmp > 0); break;
3498 case GE_EXPR: val = (cmp < 0); break;
3499 default:
3500 val = false;
3502 if (val)
3503 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3505 else if (code2 == NE_EXPR)
3507 bool val;
3508 switch (code1)
3510 case EQ_EXPR: val = (cmp == 0); break;
3511 case NE_EXPR: val = (cmp != 0); break;
3512 case LT_EXPR: val = (cmp <= 0); break;
3513 case GT_EXPR: val = (cmp >= 0); break;
3514 case LE_EXPR: val = (cmp < 0); break;
3515 case GE_EXPR: val = (cmp > 0); break;
3516 default:
3517 val = false;
3519 if (val)
3520 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3523 /* Chose the more restrictive of two < or <= comparisons. */
3524 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
3525 && (code2 == LT_EXPR || code2 == LE_EXPR))
3527 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
3528 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3529 else
3530 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3533 /* Likewise chose the more restrictive of two > or >= comparisons. */
3534 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
3535 && (code2 == GT_EXPR || code2 == GE_EXPR))
3537 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
3538 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3539 else
3540 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3543 /* Check for singleton ranges. */
3544 else if (cmp == 0
3545 && ((code1 == LE_EXPR && code2 == GE_EXPR)
3546 || (code1 == GE_EXPR && code2 == LE_EXPR)))
3547 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
3549 /* Check for disjoint ranges. */
3550 else if (cmp <= 0
3551 && (code1 == LT_EXPR || code1 == LE_EXPR)
3552 && (code2 == GT_EXPR || code2 == GE_EXPR))
3553 return boolean_false_node;
3554 else if (cmp >= 0
3555 && (code1 == GT_EXPR || code1 == GE_EXPR)
3556 && (code2 == LT_EXPR || code2 == LE_EXPR))
3557 return boolean_false_node;
3560 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
3561 NAME's definition is a truth value. See if there are any simplifications
3562 that can be done against the NAME's definition. */
3563 if (TREE_CODE (op1a) == SSA_NAME
3564 && (code1 == NE_EXPR || code1 == EQ_EXPR)
3565 && (integer_zerop (op1b) || integer_onep (op1b)))
3567 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
3568 || (code1 == NE_EXPR && integer_onep (op1b)));
3569 gimple stmt = SSA_NAME_DEF_STMT (op1a);
3570 switch (gimple_code (stmt))
3572 case GIMPLE_ASSIGN:
3573 /* Try to simplify by copy-propagating the definition. */
3574 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
3576 case GIMPLE_PHI:
3577 /* If every argument to the PHI produces the same result when
3578 ANDed with the second comparison, we win.
3579 Do not do this unless the type is bool since we need a bool
3580 result here anyway. */
3581 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
3583 tree result = NULL_TREE;
3584 unsigned i;
3585 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3587 tree arg = gimple_phi_arg_def (stmt, i);
3589 /* If this PHI has itself as an argument, ignore it.
3590 If all the other args produce the same result,
3591 we're still OK. */
3592 if (arg == gimple_phi_result (stmt))
3593 continue;
3594 else if (TREE_CODE (arg) == INTEGER_CST)
3596 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
3598 if (!result)
3599 result = boolean_false_node;
3600 else if (!integer_zerop (result))
3601 return NULL_TREE;
3603 else if (!result)
3604 result = fold_build2 (code2, boolean_type_node,
3605 op2a, op2b);
3606 else if (!same_bool_comparison_p (result,
3607 code2, op2a, op2b))
3608 return NULL_TREE;
3610 else if (TREE_CODE (arg) == SSA_NAME
3611 && !SSA_NAME_IS_DEFAULT_DEF (arg))
3613 tree temp;
3614 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
3615 /* In simple cases we can look through PHI nodes,
3616 but we have to be careful with loops.
3617 See PR49073. */
3618 if (! dom_info_available_p (CDI_DOMINATORS)
3619 || gimple_bb (def_stmt) == gimple_bb (stmt)
3620 || dominated_by_p (CDI_DOMINATORS,
3621 gimple_bb (def_stmt),
3622 gimple_bb (stmt)))
3623 return NULL_TREE;
3624 temp = and_var_with_comparison (arg, invert, code2,
3625 op2a, op2b);
3626 if (!temp)
3627 return NULL_TREE;
3628 else if (!result)
3629 result = temp;
3630 else if (!same_bool_result_p (result, temp))
3631 return NULL_TREE;
3633 else
3634 return NULL_TREE;
3636 return result;
3639 default:
3640 break;
3643 return NULL_TREE;
3646 /* Try to simplify the AND of two comparisons, specified by
3647 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
3648 If this can be simplified to a single expression (without requiring
3649 introducing more SSA variables to hold intermediate values),
3650 return the resulting tree. Otherwise return NULL_TREE.
3651 If the result expression is non-null, it has boolean type. */
3653 tree
3654 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
3655 enum tree_code code2, tree op2a, tree op2b)
3657 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
3658 if (t)
3659 return t;
3660 else
3661 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
3664 /* Helper function for or_comparisons_1: try to simplify the OR of the
3665 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3666 If INVERT is true, invert the value of VAR before doing the OR.
3667 Return NULL_EXPR if we can't simplify this to a single expression. */
3669 static tree
3670 or_var_with_comparison (tree var, bool invert,
3671 enum tree_code code2, tree op2a, tree op2b)
3673 tree t;
3674 gimple stmt = SSA_NAME_DEF_STMT (var);
3676 /* We can only deal with variables whose definitions are assignments. */
3677 if (!is_gimple_assign (stmt))
3678 return NULL_TREE;
3680 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3681 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
3682 Then we only have to consider the simpler non-inverted cases. */
3683 if (invert)
3684 t = and_var_with_comparison_1 (stmt,
3685 invert_tree_comparison (code2, false),
3686 op2a, op2b);
3687 else
3688 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
3689 return canonicalize_bool (t, invert);
3692 /* Try to simplify the OR of the ssa variable defined by the assignment
3693 STMT with the comparison specified by (OP2A CODE2 OP2B).
3694 Return NULL_EXPR if we can't simplify this to a single expression. */
3696 static tree
3697 or_var_with_comparison_1 (gimple stmt,
3698 enum tree_code code2, tree op2a, tree op2b)
3700 tree var = gimple_assign_lhs (stmt);
3701 tree true_test_var = NULL_TREE;
3702 tree false_test_var = NULL_TREE;
3703 enum tree_code innercode = gimple_assign_rhs_code (stmt);
3705 /* Check for identities like (var OR (var != 0)) => true . */
3706 if (TREE_CODE (op2a) == SSA_NAME
3707 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
3709 if ((code2 == NE_EXPR && integer_zerop (op2b))
3710 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
3712 true_test_var = op2a;
3713 if (var == true_test_var)
3714 return var;
3716 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
3717 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
3719 false_test_var = op2a;
3720 if (var == false_test_var)
3721 return boolean_true_node;
3725 /* If the definition is a comparison, recurse on it. */
3726 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
3728 tree t = or_comparisons_1 (innercode,
3729 gimple_assign_rhs1 (stmt),
3730 gimple_assign_rhs2 (stmt),
3731 code2,
3732 op2a,
3733 op2b);
3734 if (t)
3735 return t;
3738 /* If the definition is an AND or OR expression, we may be able to
3739 simplify by reassociating. */
3740 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
3741 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
3743 tree inner1 = gimple_assign_rhs1 (stmt);
3744 tree inner2 = gimple_assign_rhs2 (stmt);
3745 gimple s;
3746 tree t;
3747 tree partial = NULL_TREE;
3748 bool is_or = (innercode == BIT_IOR_EXPR);
3750 /* Check for boolean identities that don't require recursive examination
3751 of inner1/inner2:
3752 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
3753 inner1 OR (inner1 AND inner2) => inner1
3754 !inner1 OR (inner1 OR inner2) => true
3755 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
3757 if (inner1 == true_test_var)
3758 return (is_or ? var : inner1);
3759 else if (inner2 == true_test_var)
3760 return (is_or ? var : inner2);
3761 else if (inner1 == false_test_var)
3762 return (is_or
3763 ? boolean_true_node
3764 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
3765 else if (inner2 == false_test_var)
3766 return (is_or
3767 ? boolean_true_node
3768 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
3770 /* Next, redistribute/reassociate the OR across the inner tests.
3771 Compute the first partial result, (inner1 OR (op2a code op2b)) */
3772 if (TREE_CODE (inner1) == SSA_NAME
3773 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
3774 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3775 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
3776 gimple_assign_rhs1 (s),
3777 gimple_assign_rhs2 (s),
3778 code2, op2a, op2b)))
3780 /* Handle the OR case, where we are reassociating:
3781 (inner1 OR inner2) OR (op2a code2 op2b)
3782 => (t OR inner2)
3783 If the partial result t is a constant, we win. Otherwise
3784 continue on to try reassociating with the other inner test. */
3785 if (is_or)
3787 if (integer_onep (t))
3788 return boolean_true_node;
3789 else if (integer_zerop (t))
3790 return inner2;
3793 /* Handle the AND case, where we are redistributing:
3794 (inner1 AND inner2) OR (op2a code2 op2b)
3795 => (t AND (inner2 OR (op2a code op2b))) */
3796 else if (integer_zerop (t))
3797 return boolean_false_node;
3799 /* Save partial result for later. */
3800 partial = t;
3803 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
3804 if (TREE_CODE (inner2) == SSA_NAME
3805 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
3806 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3807 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
3808 gimple_assign_rhs1 (s),
3809 gimple_assign_rhs2 (s),
3810 code2, op2a, op2b)))
3812 /* Handle the OR case, where we are reassociating:
3813 (inner1 OR inner2) OR (op2a code2 op2b)
3814 => (inner1 OR t)
3815 => (t OR partial) */
3816 if (is_or)
3818 if (integer_zerop (t))
3819 return inner1;
3820 else if (integer_onep (t))
3821 return boolean_true_node;
3822 /* If both are the same, we can apply the identity
3823 (x OR x) == x. */
3824 else if (partial && same_bool_result_p (t, partial))
3825 return t;
3828 /* Handle the AND case, where we are redistributing:
3829 (inner1 AND inner2) OR (op2a code2 op2b)
3830 => (t AND (inner1 OR (op2a code2 op2b)))
3831 => (t AND partial) */
3832 else
3834 if (integer_zerop (t))
3835 return boolean_false_node;
3836 else if (partial)
3838 /* We already got a simplification for the other
3839 operand to the redistributed AND expression. The
3840 interesting case is when at least one is true.
3841 Or, if both are the same, we can apply the identity
3842 (x AND x) == x. */
3843 if (integer_onep (partial))
3844 return t;
3845 else if (integer_onep (t))
3846 return partial;
3847 else if (same_bool_result_p (t, partial))
3848 return t;
3853 return NULL_TREE;
3856 /* Try to simplify the OR of two comparisons defined by
3857 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
3858 If this can be done without constructing an intermediate value,
3859 return the resulting tree; otherwise NULL_TREE is returned.
3860 This function is deliberately asymmetric as it recurses on SSA_DEFs
3861 in the first comparison but not the second. */
3863 static tree
3864 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3865 enum tree_code code2, tree op2a, tree op2b)
3867 tree truth_type = truth_type_for (TREE_TYPE (op1a));
3869 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
3870 if (operand_equal_p (op1a, op2a, 0)
3871 && operand_equal_p (op1b, op2b, 0))
3873 /* Result will be either NULL_TREE, or a combined comparison. */
3874 tree t = combine_comparisons (UNKNOWN_LOCATION,
3875 TRUTH_ORIF_EXPR, code1, code2,
3876 truth_type, op1a, op1b);
3877 if (t)
3878 return t;
3881 /* Likewise the swapped case of the above. */
3882 if (operand_equal_p (op1a, op2b, 0)
3883 && operand_equal_p (op1b, op2a, 0))
3885 /* Result will be either NULL_TREE, or a combined comparison. */
3886 tree t = combine_comparisons (UNKNOWN_LOCATION,
3887 TRUTH_ORIF_EXPR, code1,
3888 swap_tree_comparison (code2),
3889 truth_type, op1a, op1b);
3890 if (t)
3891 return t;
3894 /* If both comparisons are of the same value against constants, we might
3895 be able to merge them. */
3896 if (operand_equal_p (op1a, op2a, 0)
3897 && TREE_CODE (op1b) == INTEGER_CST
3898 && TREE_CODE (op2b) == INTEGER_CST)
3900 int cmp = tree_int_cst_compare (op1b, op2b);
3902 /* If we have (op1a != op1b), we should either be able to
3903 return that or TRUE, depending on whether the constant op1b
3904 also satisfies the other comparison against op2b. */
3905 if (code1 == NE_EXPR)
3907 bool done = true;
3908 bool val;
3909 switch (code2)
3911 case EQ_EXPR: val = (cmp == 0); break;
3912 case NE_EXPR: val = (cmp != 0); break;
3913 case LT_EXPR: val = (cmp < 0); break;
3914 case GT_EXPR: val = (cmp > 0); break;
3915 case LE_EXPR: val = (cmp <= 0); break;
3916 case GE_EXPR: val = (cmp >= 0); break;
3917 default: done = false;
3919 if (done)
3921 if (val)
3922 return boolean_true_node;
3923 else
3924 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3927 /* Likewise if the second comparison is a != comparison. */
3928 else if (code2 == NE_EXPR)
3930 bool done = true;
3931 bool val;
3932 switch (code1)
3934 case EQ_EXPR: val = (cmp == 0); break;
3935 case NE_EXPR: val = (cmp != 0); break;
3936 case LT_EXPR: val = (cmp > 0); break;
3937 case GT_EXPR: val = (cmp < 0); break;
3938 case LE_EXPR: val = (cmp >= 0); break;
3939 case GE_EXPR: val = (cmp <= 0); break;
3940 default: done = false;
3942 if (done)
3944 if (val)
3945 return boolean_true_node;
3946 else
3947 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3951 /* See if an equality test is redundant with the other comparison. */
3952 else if (code1 == EQ_EXPR)
3954 bool val;
3955 switch (code2)
3957 case EQ_EXPR: val = (cmp == 0); break;
3958 case NE_EXPR: val = (cmp != 0); break;
3959 case LT_EXPR: val = (cmp < 0); break;
3960 case GT_EXPR: val = (cmp > 0); break;
3961 case LE_EXPR: val = (cmp <= 0); break;
3962 case GE_EXPR: val = (cmp >= 0); break;
3963 default:
3964 val = false;
3966 if (val)
3967 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3969 else if (code2 == EQ_EXPR)
3971 bool val;
3972 switch (code1)
3974 case EQ_EXPR: val = (cmp == 0); break;
3975 case NE_EXPR: val = (cmp != 0); break;
3976 case LT_EXPR: val = (cmp > 0); break;
3977 case GT_EXPR: val = (cmp < 0); break;
3978 case LE_EXPR: val = (cmp >= 0); break;
3979 case GE_EXPR: val = (cmp <= 0); break;
3980 default:
3981 val = false;
3983 if (val)
3984 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3987 /* Chose the less restrictive of two < or <= comparisons. */
3988 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
3989 && (code2 == LT_EXPR || code2 == LE_EXPR))
3991 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
3992 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3993 else
3994 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3997 /* Likewise chose the less restrictive of two > or >= comparisons. */
3998 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
3999 && (code2 == GT_EXPR || code2 == GE_EXPR))
4001 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4002 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4003 else
4004 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4007 /* Check for singleton ranges. */
4008 else if (cmp == 0
4009 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4010 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4011 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4013 /* Check for less/greater pairs that don't restrict the range at all. */
4014 else if (cmp >= 0
4015 && (code1 == LT_EXPR || code1 == LE_EXPR)
4016 && (code2 == GT_EXPR || code2 == GE_EXPR))
4017 return boolean_true_node;
4018 else if (cmp <= 0
4019 && (code1 == GT_EXPR || code1 == GE_EXPR)
4020 && (code2 == LT_EXPR || code2 == LE_EXPR))
4021 return boolean_true_node;
4024 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4025 NAME's definition is a truth value. See if there are any simplifications
4026 that can be done against the NAME's definition. */
4027 if (TREE_CODE (op1a) == SSA_NAME
4028 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4029 && (integer_zerop (op1b) || integer_onep (op1b)))
4031 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4032 || (code1 == NE_EXPR && integer_onep (op1b)));
4033 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4034 switch (gimple_code (stmt))
4036 case GIMPLE_ASSIGN:
4037 /* Try to simplify by copy-propagating the definition. */
4038 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4040 case GIMPLE_PHI:
4041 /* If every argument to the PHI produces the same result when
4042 ORed with the second comparison, we win.
4043 Do not do this unless the type is bool since we need a bool
4044 result here anyway. */
4045 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4047 tree result = NULL_TREE;
4048 unsigned i;
4049 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4051 tree arg = gimple_phi_arg_def (stmt, i);
4053 /* If this PHI has itself as an argument, ignore it.
4054 If all the other args produce the same result,
4055 we're still OK. */
4056 if (arg == gimple_phi_result (stmt))
4057 continue;
4058 else if (TREE_CODE (arg) == INTEGER_CST)
4060 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4062 if (!result)
4063 result = boolean_true_node;
4064 else if (!integer_onep (result))
4065 return NULL_TREE;
4067 else if (!result)
4068 result = fold_build2 (code2, boolean_type_node,
4069 op2a, op2b);
4070 else if (!same_bool_comparison_p (result,
4071 code2, op2a, op2b))
4072 return NULL_TREE;
4074 else if (TREE_CODE (arg) == SSA_NAME
4075 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4077 tree temp;
4078 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4079 /* In simple cases we can look through PHI nodes,
4080 but we have to be careful with loops.
4081 See PR49073. */
4082 if (! dom_info_available_p (CDI_DOMINATORS)
4083 || gimple_bb (def_stmt) == gimple_bb (stmt)
4084 || dominated_by_p (CDI_DOMINATORS,
4085 gimple_bb (def_stmt),
4086 gimple_bb (stmt)))
4087 return NULL_TREE;
4088 temp = or_var_with_comparison (arg, invert, code2,
4089 op2a, op2b);
4090 if (!temp)
4091 return NULL_TREE;
4092 else if (!result)
4093 result = temp;
4094 else if (!same_bool_result_p (result, temp))
4095 return NULL_TREE;
4097 else
4098 return NULL_TREE;
4100 return result;
4103 default:
4104 break;
4107 return NULL_TREE;
4110 /* Try to simplify the OR of two comparisons, specified by
4111 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4112 If this can be simplified to a single expression (without requiring
4113 introducing more SSA variables to hold intermediate values),
4114 return the resulting tree. Otherwise return NULL_TREE.
4115 If the result expression is non-null, it has boolean type. */
4117 tree
4118 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4119 enum tree_code code2, tree op2a, tree op2b)
4121 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4122 if (t)
4123 return t;
4124 else
4125 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4129 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4131 Either NULL_TREE, a simplified but non-constant or a constant
4132 is returned.
4134 ??? This should go into a gimple-fold-inline.h file to be eventually
4135    privatized with the single valueize function used in the various TUs
4136    to avoid the indirect function call overhead.  */
4138 tree
4139 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
4141   location_t loc = gimple_location (stmt);
4142   switch (gimple_code (stmt))
4144     case GIMPLE_ASSIGN:
4146         enum tree_code subcode = gimple_assign_rhs_code (stmt);
4148         switch (get_gimple_rhs_class (subcode))
4150           case GIMPLE_SINGLE_RHS:
4152               tree rhs = gimple_assign_rhs1 (stmt);
4153               enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4155               if (TREE_CODE (rhs) == SSA_NAME)
4157                   /* If the RHS is an SSA_NAME, return its known constant value,
4158                      if any.  */
4159                   return (*valueize) (rhs);
4161               /* Handle propagating invariant addresses into address
4162                  operations.  */
4163               else if (TREE_CODE (rhs) == ADDR_EXPR
4164                        && !is_gimple_min_invariant (rhs))
4166                   HOST_WIDE_INT offset = 0;
4167                   tree base;
4168                   base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4169                                                           &offset,
4170                                                           valueize);
4171                   if (base
4172                       && (CONSTANT_CLASS_P (base)
4173                           || decl_address_invariant_p (base)))
4174                     return build_invariant_address (TREE_TYPE (rhs),
4175                                                     base, offset);
/* A vector CONSTRUCTOR folds to a VECTOR_CST only when every valueized
   element is itself a constant; otherwise give up.  */
4177               else if (TREE_CODE (rhs) == CONSTRUCTOR
4178                        && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4179                        && (CONSTRUCTOR_NELTS (rhs)
4180                            == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4182                   unsigned i;
4183                   tree val, *vec;
4185                   vec = XALLOCAVEC (tree,
4186                                     TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
4187                   FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4189                       val = (*valueize) (val);
4190                       if (TREE_CODE (val) == INTEGER_CST
4191                           || TREE_CODE (val) == REAL_CST
4192                           || TREE_CODE (val) == FIXED_CST)
4193                         vec[i] = val;
4194                       else
4195                         return NULL_TREE;
4198                   return build_vector (TREE_TYPE (rhs), vec);
4200               if (subcode == OBJ_TYPE_REF)
4202                   tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
4203                   /* If callee is constant, we can fold away the wrapper.  */
4204                   if (is_gimple_min_invariant (val))
4205                     return val;
4208               if (kind == tcc_reference)
4210                   if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
4211                        || TREE_CODE (rhs) == REALPART_EXPR
4212                        || TREE_CODE (rhs) == IMAGPART_EXPR)
4213                       && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4215                       tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4216                       return fold_unary_loc (EXPR_LOCATION (rhs),
4217                                              TREE_CODE (rhs),
4218                                              TREE_TYPE (rhs), val);
4220                   else if (TREE_CODE (rhs) == BIT_FIELD_REF
4221                            && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4223                       tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4224                       return fold_ternary_loc (EXPR_LOCATION (rhs),
4225                                                TREE_CODE (rhs),
4226                                                TREE_TYPE (rhs), val,
4227                                                TREE_OPERAND (rhs, 1),
4228                                                TREE_OPERAND (rhs, 2));
4230                   else if (TREE_CODE (rhs) == MEM_REF
4231                            && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4233                       tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4234                       if (TREE_CODE (val) == ADDR_EXPR
4235                           && is_gimple_min_invariant (val))
4237                           tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
4238                                                   unshare_expr (val),
4239                                                   TREE_OPERAND (rhs, 1));
4240                           if (tem)
4241                             rhs = tem;
/* Fall back to folding a read from a constant initializer.  */
4244                   return fold_const_aggregate_ref_1 (rhs, valueize);
4246               else if (kind == tcc_declaration)
4247                 return get_symbol_constant_value (rhs);
4248               return rhs;
4251           case GIMPLE_UNARY_RHS:
4253               /* Handle unary operators that can appear in GIMPLE form.
4254                  Note that we know the single operand must be a constant,
4255                  so this should almost always return a simplified RHS.  */
4256               tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4258               return
4259                 fold_unary_ignore_overflow_loc (loc, subcode,
4260                                                 gimple_expr_type (stmt), op0);
4263           case GIMPLE_BINARY_RHS:
4265               /* Handle binary operators that can appear in GIMPLE form.  */
4266               tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4267               tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4269               /* Translate &x + CST into an invariant form suitable for
4270                  further propagation.  */
4271               if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
4272                   && TREE_CODE (op0) == ADDR_EXPR
4273                   && TREE_CODE (op1) == INTEGER_CST)
4275                   tree off = fold_convert (ptr_type_node, op1);
4276                   return build_fold_addr_expr_loc
4277                       (loc,
4278                        fold_build2 (MEM_REF,
4279                                     TREE_TYPE (TREE_TYPE (op0)),
4280                                     unshare_expr (op0), off));
4283               return fold_binary_loc (loc, subcode,
4284                                       gimple_expr_type (stmt), op0, op1);
4287           case GIMPLE_TERNARY_RHS:
4289               /* Handle ternary operators that can appear in GIMPLE form.  */
4290               tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4291               tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4292               tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
4294               /* Fold embedded expressions in ternary codes.  */
4295               if ((subcode == COND_EXPR
4296                    || subcode == VEC_COND_EXPR)
4297                   && COMPARISON_CLASS_P (op0))
4299                   tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
4300                   tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
4301                   tree tem = fold_binary_loc (loc, TREE_CODE (op0),
4302                                               TREE_TYPE (op0), op00, op01);
4303                   if (tem)
4304                     op0 = tem;
4307               return fold_ternary_loc (loc, subcode,
4308                                        gimple_expr_type (stmt), op0, op1, op2);
4311           default:
4312             gcc_unreachable ();
4316     case GIMPLE_CALL:
4318         tree fn;
/* Internal functions: try to fold the UBSAN overflow-check builtins
   to a constant or to an obvious identity.  */
4320         if (gimple_call_internal_p (stmt))
4322             enum tree_code subcode = ERROR_MARK;
4323             switch (gimple_call_internal_fn (stmt))
4325               case IFN_UBSAN_CHECK_ADD:
4326                 subcode = PLUS_EXPR;
4327                 break;
4328               case IFN_UBSAN_CHECK_SUB:
4329                 subcode = MINUS_EXPR;
4330                 break;
4331               case IFN_UBSAN_CHECK_MUL:
4332                 subcode = MULT_EXPR;
4333                 break;
4334               default:
4335                 return NULL_TREE;
4337             tree arg0 = gimple_call_arg (stmt, 0);
4338             tree arg1 = gimple_call_arg (stmt, 1);
4339             tree op0 = (*valueize) (arg0);
4340             tree op1 = (*valueize) (arg1);
/* When not both operands are constant, only a few identities fold
   without risk of overflow.  */
4342             if (TREE_CODE (op0) != INTEGER_CST
4343                 || TREE_CODE (op1) != INTEGER_CST)
4345                 switch (subcode)
4347                   case MULT_EXPR:
4348                     /* x * 0 = 0 * x = 0 without overflow.  */
4349                     if (integer_zerop (op0) || integer_zerop (op1))
4350                       return build_zero_cst (TREE_TYPE (arg0));
4351                     break;
4352                   case MINUS_EXPR:
4353                     /* y - y = 0 without overflow.  */
4354                     if (operand_equal_p (op0, op1, 0))
4355                       return build_zero_cst (TREE_TYPE (arg0));
4356                     break;
4357                   default:
4358                     break;
4361             tree res
4362               = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
4363             if (res
4364                 && TREE_CODE (res) == INTEGER_CST
4365                 && !TREE_OVERFLOW (res))
4366               return res;
4367             return NULL_TREE;
/* Regular calls: valueize the callee and fold builtin calls whose
   arguments all valueize to constants.  */
4370         fn = (*valueize) (gimple_call_fn (stmt));
4371         if (TREE_CODE (fn) == ADDR_EXPR
4372             && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
4373             && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
4374             && gimple_builtin_call_types_compatible_p (stmt,
4375                                                        TREE_OPERAND (fn, 0)))
4377             tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
4378             tree call, retval;
4379             unsigned i;
4380             for (i = 0; i < gimple_call_num_args (stmt); ++i)
4381               args[i] = (*valueize) (gimple_call_arg (stmt, i));
4382             call = build_call_array_loc (loc,
4383                                          gimple_call_return_type (stmt),
4384                                          fn, gimple_call_num_args (stmt), args);
4385             retval = fold_call_expr (EXPR_LOCATION (call), call, false);
4386             if (retval)
4388                 /* fold_call_expr wraps the result inside a NOP_EXPR.  */
4389                 STRIP_NOPS (retval);
4390                 retval = fold_convert (gimple_call_return_type (stmt), retval);
4392             return retval;
4394         return NULL_TREE;
4397     default:
4398       return NULL_TREE;
4402 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4403    Returns NULL_TREE if folding to a constant is not possible, otherwise
4404    returns a constant according to is_gimple_min_invariant.  */
4406 tree
4407 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
4409   tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
/* The _1 worker may produce trees that are not gimple invariants;
   only accept results callers can treat as constants.  */
4410   if (res && is_gimple_min_invariant (res))
4411     return res;
4412   return NULL_TREE;
4416 /* The following set of functions are supposed to fold references using
4417    their constant initializers.  */
4419 static tree fold_ctor_reference (tree type, tree ctor,
4420                                  unsigned HOST_WIDE_INT offset,
4421                                  unsigned HOST_WIDE_INT size, tree);
4423 /* See if we can find constructor defining value of BASE.
4424    When we know the constructor with constant offset (such as
4425    base is array[40] and we do know constructor of array), then
4426    BIT_OFFSET is adjusted accordingly.
4428    As a special case, return error_mark_node when constructor
4429    is not explicitly available, but it is known to be zero
4430    such as 'static const int a;'.  */
4431 static tree
4432 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
4433                       tree (*valueize)(tree))
4435   HOST_WIDE_INT bit_offset2, size, max_size;
4436   if (TREE_CODE (base) == MEM_REF)
4438       if (!integer_zerop (TREE_OPERAND (base, 1)))
4440           if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
4441             return NULL_TREE;
/* Accumulate the MEM_REF's constant byte offset into *BIT_OFFSET.  */
4442           *bit_offset += (mem_ref_offset (base).to_short_addr ()
4443                           * BITS_PER_UNIT);
/* Strip the address: valueize the SSA pointer, then peel the ADDR_EXPR
   so BASE becomes the underlying decl or reference.  */
4446       if (valueize
4447           && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
4448         base = valueize (TREE_OPERAND (base, 0));
4449       if (!base || TREE_CODE (base) != ADDR_EXPR)
4450         return NULL_TREE;
4451       base = TREE_OPERAND (base, 0);
4454   /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
4455      DECL_INITIAL.  If BASE is a nested reference into another
4456      ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
4457      the inner reference.  */
4458   switch (TREE_CODE (base))
4460     case VAR_DECL:
4461     case CONST_DECL:
4463         tree init = ctor_for_folding (base);
4465         /* Our semantic is exact opposite of ctor_for_folding;
4466            NULL means unknown, while error_mark_node is 0.  */
4467         if (init == error_mark_node)
4468           return NULL_TREE;
4469         if (!init)
4470           return error_mark_node;
4471         return init;
4474     case ARRAY_REF:
4475     case COMPONENT_REF:
/* Only exact-size references fold; a variable extent means we do not
   know which part of the initializer is read.  */
4476       base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
4477       if (max_size == -1 || size != max_size)
4478         return NULL_TREE;
4479       *bit_offset +=  bit_offset2;
4480       return get_base_constructor (base, bit_offset, valueize);
4482     case STRING_CST:
4483     case CONSTRUCTOR:
4484       return base;
4486     default:
4487       return NULL_TREE;
4491 /* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
4492    SIZE to the memory at bit OFFSET.  */
4494 static tree
4495 fold_array_ctor_reference (tree type, tree ctor,
4496                            unsigned HOST_WIDE_INT offset,
4497                            unsigned HOST_WIDE_INT size,
4498                            tree from_decl)
4500   unsigned HOST_WIDE_INT cnt;
4501   tree cfield, cval;
4502   offset_int low_bound;
4503   offset_int elt_size;
4504   offset_int index, max_index;
4505   offset_int access_index;
4506   tree domain_type = NULL_TREE, index_type = NULL_TREE;
4507   HOST_WIDE_INT inner_offset;
4509   /* Compute low bound and elt size.  */
4510   if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
4511     domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
4512   if (domain_type && TYPE_MIN_VALUE (domain_type))
4514       /* Static constructors for variably sized objects makes no sense.  */
4515       gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
4516       index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
4517       low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
4519   else
4520     low_bound = 0;
4521   /* Static constructors for variably sized objects makes no sense.  */
4522   gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
4523               == INTEGER_CST);
4524   elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
4526   /* We can handle only constantly sized accesses that are known to not
4527      be larger than size of array element.  */
4528   if (!TYPE_SIZE_UNIT (type)
4529       || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
4530       || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
4531       || elt_size == 0)
4532     return NULL_TREE;
4534   /* Compute the array index we look for.  */
4535   access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
4536                                  elt_size);
4537   access_index += low_bound;
/* Wrap the index to the precision/signedness of the domain so that the
   comparisons against constructor indices below are meaningful.  */
4538   if (index_type)
4539     access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
4540                             TYPE_SIGN (index_type));
4542   /* And offset within the access.  */
4543   inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
4545   /* See if the array field is large enough to span whole access.  We do not
4546      care to fold accesses spanning multiple array indexes.  */
4547   if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
4548     return NULL_TREE;
/* Start one below LOW_BOUND so the "next index after previous" rule in
   the loop yields LOW_BOUND for the first unindexed element.  */
4550   index = low_bound - 1;
4551   if (index_type)
4552     index = wi::ext (index, TYPE_PRECISION (index_type),
4553                      TYPE_SIGN (index_type));
4555   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
4557       /* Array constructor might explicitly set index, or specify range
4558          or leave index NULL meaning that it is next index after previous
4559          one.  */
4560       if (cfield)
4562           if (TREE_CODE (cfield) == INTEGER_CST)
4563             max_index = index = wi::to_offset (cfield);
4564           else
4566               gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
4567               index = wi::to_offset (TREE_OPERAND (cfield, 0));
4568               max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
4571       else
4573           index += 1;
4574           if (index_type)
4575             index = wi::ext (index, TYPE_PRECISION (index_type),
4576                              TYPE_SIGN (index_type));
4577           max_index = index;
4580       /* Do we have match?  */
4581       if (wi::cmpu (access_index, index) >= 0
4582           && wi::cmpu (access_index, max_index) <= 0)
4583         return fold_ctor_reference (type, cval, inner_offset, size,
4584                                     from_decl);
4586   /* When memory is not explicitly mentioned in constructor,
4587      it is 0 (or out of range).  */
4588   return build_zero_cst (type);
4591 /* CTOR is CONSTRUCTOR of an aggregate or vector.
4592    Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */
4594 static tree
4595 fold_nonarray_ctor_reference (tree type, tree ctor,
4596                               unsigned HOST_WIDE_INT offset,
4597                               unsigned HOST_WIDE_INT size,
4598                               tree from_decl)
4600   unsigned HOST_WIDE_INT cnt;
4601   tree cfield, cval;
4603   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
4604                             cval)
4606       tree byte_offset = DECL_FIELD_OFFSET (cfield);
4607       tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
4608       tree field_size = DECL_SIZE (cfield);
4609       offset_int bitoffset;
4610       offset_int bitoffset_end, access_end;
4612       /* Variable sized objects in static constructors makes no sense,
4613          but field_size can be NULL for flexible array members.  */
4614       gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
4615                   && TREE_CODE (byte_offset) == INTEGER_CST
4616                   && (field_size != NULL_TREE
4617                       ? TREE_CODE (field_size) == INTEGER_CST
4618                       : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
4620       /* Compute bit offset of the field.  */
4621       bitoffset = (wi::to_offset (field_offset)
4622                    + wi::lshift (wi::to_offset (byte_offset),
4623                                  LOG2_BITS_PER_UNIT));
4624       /* Compute bit offset where the field ends.  */
4625       if (field_size != NULL_TREE)
4626         bitoffset_end = bitoffset + wi::to_offset (field_size);
4627       else
4628         bitoffset_end = 0;
4630       access_end = offset_int (offset) + size;
4632       /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
4633          [BITOFFSET, BITOFFSET_END)?  */
4634       if (wi::cmps (access_end, bitoffset) > 0
4635           && (field_size == NULL_TREE
4636               || wi::lts_p (offset, bitoffset_end)))
4638           offset_int inner_offset = offset_int (offset) - bitoffset;
4639           /* We do have overlap.  Now see if field is large enough to
4640              cover the access.  Give up for accesses spanning multiple
4641              fields.  */
4642           if (wi::cmps (access_end, bitoffset_end) > 0)
4643             return NULL_TREE;
4644           if (wi::lts_p (offset, bitoffset))
4645             return NULL_TREE;
/* The access is fully inside this field; recurse into its value.  */
4646           return fold_ctor_reference (type, cval,
4647                                       inner_offset.to_uhwi (), size,
4648                                       from_decl);
4651   /* When memory is not explicitly mentioned in constructor, it is 0.  */
4652   return build_zero_cst (type);
4655 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
4656    to the memory at bit OFFSET.  */
4658 static tree
4659 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
4660                      unsigned HOST_WIDE_INT size, tree from_decl)
4662   tree ret;
4664   /* We found the field with exact match.  */
4665   if (useless_type_conversion_p (type, TREE_TYPE (ctor))
4666       && !offset)
4667     return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
4669   /* We are at the end of walk, see if we can view convert the
4670      result.  */
4671   if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
4672       /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
4673       && !compare_tree_int (TYPE_SIZE (type), size)
4674       && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
4676       ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
4677       ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
4678       if (ret)
4679         STRIP_NOPS (ret);
4680       return ret;
4682   /* For constants and byte-aligned/sized reads try to go through
4683      native_encode/interpret.  */
4684   if (CONSTANT_CLASS_P (ctor)
4685       && BITS_PER_UNIT == 8
4686       && offset % BITS_PER_UNIT == 0
4687       && size % BITS_PER_UNIT == 0
4688       && size <= MAX_BITSIZE_MODE_ANY_MODE)
4690       unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
4691       if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
4692                               offset / BITS_PER_UNIT) > 0)
4693         return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
/* Aggregate initializers: dispatch on array/vector vs. record/union
   layout to the specialized walkers.  */
4695   if (TREE_CODE (ctor) == CONSTRUCTOR)
4698       if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
4699           || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
4700         return fold_array_ctor_reference (type, ctor, offset, size,
4701                                           from_decl);
4702       else
4703         return fold_nonarray_ctor_reference (type, ctor, offset, size,
4704                                              from_decl);
4707   return NULL_TREE;
4710 /* Return the tree representing the element referenced by T if T is an
4711    ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
4712    names using VALUEIZE.  Return NULL_TREE otherwise.  */
4714 tree
4715 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
4717   tree ctor, idx, base;
4718   HOST_WIDE_INT offset, size, max_size;
4719   tree tem;
/* Never fold away a volatile access.  */
4721   if (TREE_THIS_VOLATILE (t))
4722     return NULL_TREE;
4724   if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4725     return get_symbol_constant_value (t);
/* Reads from string literals fold directly.  */
4727   tem = fold_read_from_constant_string (t);
4728   if (tem)
4729     return tem;
4731   switch (TREE_CODE (t))
4733     case ARRAY_REF:
4734     case ARRAY_RANGE_REF:
4735       /* Constant indexes are handled well by get_base_constructor.
4736          Only special case variable offsets.
4737          FIXME: This code can't handle nested references with variable indexes
4738          (they will be handled only by iteration of ccp).  Perhaps we can bring
4739          get_ref_base_and_extent here and make it use a valueize callback.  */
4740       if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
4741           && valueize
4742           && (idx = (*valueize) (TREE_OPERAND (t, 1)))
4743           && TREE_CODE (idx) == INTEGER_CST)
4745           tree low_bound, unit_size;
4747           /* If the resulting bit-offset is constant, track it.  */
4748           if ((low_bound = array_ref_low_bound (t),
4749                TREE_CODE (low_bound) == INTEGER_CST)
4750               && (unit_size = array_ref_element_size (t),
4751                   tree_fits_uhwi_p (unit_size)))
4753               offset_int woffset
4754                 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
4755                             TYPE_PRECISION (TREE_TYPE (idx)));
4757               if (wi::fits_shwi_p (woffset))
4759                   offset = woffset.to_shwi ();
4760                   /* TODO: This code seems wrong, multiply then check
4761                      to see if it fits.  */
4762                   offset *= tree_to_uhwi (unit_size);
4763                   offset *= BITS_PER_UNIT;
4765                   base = TREE_OPERAND (t, 0);
4766                   ctor = get_base_constructor (base, &offset, valueize);
4767                   /* Empty constructor.  Always fold to 0.  */
4768                   if (ctor == error_mark_node)
4769                     return build_zero_cst (TREE_TYPE (t));
4770                   /* Out of bound array access.  Value is undefined,
4771                      but don't fold.  */
4772                   if (offset < 0)
4773                     return NULL_TREE;
4774                   /* We can not determine ctor.  */
4775                   if (!ctor)
4776                     return NULL_TREE;
4777                   return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
4778                                               tree_to_uhwi (unit_size)
4779                                               * BITS_PER_UNIT,
4780                                               base);
4784       /* Fallthru.  */
4786     case COMPONENT_REF:
4787     case BIT_FIELD_REF:
4788     case TARGET_MEM_REF:
4789     case MEM_REF:
4790       base = get_ref_base_and_extent (t, &offset, &size, &max_size);
4791       ctor = get_base_constructor (base, &offset, valueize);
4793       /* Empty constructor.  Always fold to 0.  */
4794       if (ctor == error_mark_node)
4795         return build_zero_cst (TREE_TYPE (t));
4796       /* We do not know precise address.  */
4797       if (max_size == -1 || max_size != size)
4798         return NULL_TREE;
4799       /* We can not determine ctor.  */
4800       if (!ctor)
4801         return NULL_TREE;
4803       /* Out of bound array access.  Value is undefined, but don't fold.  */
4804       if (offset < 0)
4805         return NULL_TREE;
4807       return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
4808                                   base);
4810     case REALPART_EXPR:
4811     case IMAGPART_EXPR:
4813         tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
4814         if (c && TREE_CODE (c) == COMPLEX_CST)
4815           return fold_build1_loc (EXPR_LOCATION (t),
4816                                   TREE_CODE (t), TREE_TYPE (t), c);
4817         break;
4820     default:
4821       break;
4824   return NULL_TREE;
/* Like fold_const_aggregate_ref_1, but with no SSA-name valueization.  */
4827 tree
4828 fold_const_aggregate_ref (tree t)
4830   return fold_const_aggregate_ref_1 (t, NULL);
4833 /* Lookup virtual method with index TOKEN in a virtual table V
4834    at OFFSET.
4835    Set CAN_REFER if non-NULL to false if method
4836    is not referable or if the virtual table is ill-formed (such as rewritten
4837    by non-C++ produced symbol).  Otherwise just return NULL in that case.  */
4839 tree
4840 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
4841                                    tree v,
4842                                    unsigned HOST_WIDE_INT offset,
4843                                    bool *can_refer)
4845   tree vtable = v, init, fn;
4846   unsigned HOST_WIDE_INT size;
4847   unsigned HOST_WIDE_INT elt_size, access_index;
4848   tree domain_type;
4850   if (can_refer)
4851     *can_refer = true;
4853   /* First of all double check we have virtual table.  */
4854   if (TREE_CODE (v) != VAR_DECL
4855       || !DECL_VIRTUAL_P (v))
4857       gcc_assert (in_lto_p);
4858       /* Pass down that we lost track of the target.  */
4859       if (can_refer)
4860         *can_refer = false;
4861       return NULL_TREE;
4864   init = ctor_for_folding (v);
4866   /* The virtual tables should always be born with constructors
4867      and we always should assume that they are available for
4868      folding.  At the moment we do not stream them in all cases,
4869      but it should never happen that ctor seem unreachable.  */
4870   gcc_assert (init);
4871   if (init == error_mark_node)
4873       gcc_assert (in_lto_p);
4874       /* Pass down that we lost track of the target.  */
4875       if (can_refer)
4876         *can_refer = false;
4877       return NULL_TREE;
4879   gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
4880   size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
/* Convert OFFSET to bits and add TOKEN slots worth of bits.  */
4881   offset *= BITS_PER_UNIT;
4882   offset += token * size;
4884   /* Lookup the value in the constructor that is assumed to be array.
4885      This is equivalent to
4886      fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
4887                                offset, size, NULL);
4888      but in a constant time.  We expect that frontend produced a simple
4889      array without indexed initializers.  */
4891   gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
4892   domain_type = TYPE_DOMAIN (TREE_TYPE (init));
4893   gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
4894   elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
4896   access_index = offset / BITS_PER_UNIT / elt_size;
4897   gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
4899   /* This code makes an assumption that there are no
4900      indexed fields produced by C++ FE, so we can directly index the array.  */
4901   if (access_index < CONSTRUCTOR_NELTS (init))
4903       fn = CONSTRUCTOR_ELT (init, access_index)->value;
4904       gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
4905       STRIP_NOPS (fn);
4907   else
4908     fn = NULL;
4910   /* For type inconsistent program we may end up looking up virtual method
4911      in virtual table that does not contain TOKEN entries.  We may overrun
4912      the virtual table and pick up a constant or RTTI info pointer.
4913      In any case the call is undefined.  */
4914   if (!fn
4915       || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
4916       || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
4917     fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4918   else
4920       fn = TREE_OPERAND (fn, 0);
4922       /* When cgraph node is missing and function is not public, we cannot
4923          devirtualize.  This can happen in WHOPR when the actual method
4924          ends up in other partition, because we found devirtualization
4925          possibility too late.  */
4926       if (!can_refer_decl_in_current_unit_p (fn, vtable))
4928           if (can_refer)
4930               *can_refer = false;
4931               return fn;
4933           return NULL_TREE;
4937   /* Make sure we create a cgraph node for functions we'll reference.
4938      They can be non-existent if the reference comes from an entry
4939      of an external vtable for example.  */
4940   cgraph_node::get_create (fn);
4942   return fn;
4945 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
4946    is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
4947    KNOWN_BINFO carries the binfo describing the true type of
4948    OBJ_TYPE_REF_OBJECT(REF).
4949    Set CAN_REFER if non-NULL to false if method
4950    is not referable or if the virtual table is ill-formed (such as rewritten
4951    by non-C++ produced symbol).  Otherwise just return NULL in that case.  */
4953 tree
4954 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
4955                                   bool *can_refer)
4957   unsigned HOST_WIDE_INT offset;
4958   tree v;
4960   v = BINFO_VTABLE (known_binfo);
4961   /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
4962   if (!v)
4963     return NULL_TREE;
/* Decompose the vtable pointer expression into the vtable VAR_DECL and
   a byte offset into it.  */
4965   if (!vtable_pointer_value_to_vtable (v, &v, &offset))
4967       if (can_refer)
4968         *can_refer = false;
4969       return NULL_TREE;
4971   return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
4974 /* Return true iff VAL is a gimple expression that is known to be
4975    non-negative.  Restricted to floating-point inputs.  */
4977 bool
4978 gimple_val_nonnegative_real_p (tree val)
4980   gimple def_stmt;
4982   gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
4984   /* Use existing logic for non-gimple trees.  */
4985   if (tree_expr_nonnegative_p (val))
4986     return true;
4988   if (TREE_CODE (val) != SSA_NAME)
4989     return false;
4991   /* Currently we look only at the immediately defining statement
4992      to make this determination, since recursion on defining
4993      statements of operands can lead to quadratic behavior in the
4994      worst case.  This is expected to catch almost all occurrences
4995      in practice.  It would be possible to implement limited-depth
4996      recursion if important cases are lost.  Alternatively, passes
4997      that need this information (such as the pow/powi lowering code
4998      in the cse_sincos pass) could be revised to provide it through
4999      dataflow propagation.  */
5001   def_stmt = SSA_NAME_DEF_STMT (val);
5003   if (is_gimple_assign (def_stmt))
5005       tree op0, op1;
5007       /* See fold-const.c:tree_expr_nonnegative_p for additional
5008          cases that could be handled with recursion.  */
5010       switch (gimple_assign_rhs_code (def_stmt))
5012         case ABS_EXPR:
5013           /* Always true for floating-point operands.  */
5014           return true;
5016         case MULT_EXPR:
5017           /* True if the two operands are identical (since we are
5018              restricted to floating-point inputs).  */
5019           op0 = gimple_assign_rhs1 (def_stmt);
5020           op1 = gimple_assign_rhs2 (def_stmt);
5022           if (op0 == op1
5023               || operand_equal_p (op0, op1, 0))
5024             return true;
/* Fall through to the default (unknown) case.  */
5026         default:
5027           return false;
5030   else if (is_gimple_call (def_stmt))
5032       tree fndecl = gimple_call_fndecl (def_stmt);
5033       if (fndecl
5034           && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5036           tree arg1;
5038           switch (DECL_FUNCTION_CODE (fndecl))
5040             CASE_FLT_FN (BUILT_IN_ACOS):
5041             CASE_FLT_FN (BUILT_IN_ACOSH):
5042             CASE_FLT_FN (BUILT_IN_CABS):
5043             CASE_FLT_FN (BUILT_IN_COSH):
5044             CASE_FLT_FN (BUILT_IN_ERFC):
5045             CASE_FLT_FN (BUILT_IN_EXP):
5046             CASE_FLT_FN (BUILT_IN_EXP10):
5047             CASE_FLT_FN (BUILT_IN_EXP2):
5048             CASE_FLT_FN (BUILT_IN_FABS):
5049             CASE_FLT_FN (BUILT_IN_FDIM):
5050             CASE_FLT_FN (BUILT_IN_HYPOT):
5051             CASE_FLT_FN (BUILT_IN_POW10):
5052               return true;
5054             CASE_FLT_FN (BUILT_IN_SQRT):
5055               /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
5056                  nonnegative inputs.  */
5057               if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
5058                 return true;
5060               break;
5062             CASE_FLT_FN (BUILT_IN_POWI):
5063               /* True if the second argument is an even integer.  */
5064               arg1 = gimple_call_arg (def_stmt, 1);
5066               if (TREE_CODE (arg1) == INTEGER_CST
5067                   && (TREE_INT_CST_LOW (arg1) & 1) == 0)
5068                 return true;
5070               break;
5072             CASE_FLT_FN (BUILT_IN_POW):
5073               /* True if the second argument is an even integer-valued
5074                  real.  */
5075               arg1 = gimple_call_arg (def_stmt, 1);
5077               if (TREE_CODE (arg1) == REAL_CST)
5079                   REAL_VALUE_TYPE c;
5080                   HOST_WIDE_INT n;
5082                   c = TREE_REAL_CST (arg1);
5083                   n = real_to_integer (&c);
/* N is even; make sure the exponent was an exact integer, i.e. the
   round trip through real_from_integer reproduces it.  */
5085                   if ((n & 1) == 0)
5087                       REAL_VALUE_TYPE cint;
5088                       real_from_integer (&cint, VOIDmode, n, SIGNED);
5089                       if (real_identical (&c, &cint))
5090                         return true;
5094               break;
5096             default:
5097               return false;
5102   return false;
5105 /* Given a pointer value OP0, return a simplified version of an
5106    indirection through OP0, or NULL_TREE if no simplification is
5107    possible.  Note that the resulting type may be different from
5108    the type pointed to in the sense that it is still compatible
5109    from the langhooks point of view.  */
5111 tree
5112 gimple_fold_indirect_ref (tree t)
5114   tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5115   tree sub = t;
5116   tree subtype;
5118   STRIP_NOPS (sub);
5119   subtype = TREE_TYPE (sub);
5120   if (!POINTER_TYPE_P (subtype))
5121     return NULL_TREE;
5123   if (TREE_CODE (sub) == ADDR_EXPR)
5125       tree op = TREE_OPERAND (sub, 0);
5126       tree optype = TREE_TYPE (op);
5127       /* *&p => p */
5128       if (useless_type_conversion_p (type, optype))
5129         return op;
5131       /* *(foo *)&fooarray => fooarray[0] */
5132       if (TREE_CODE (optype) == ARRAY_TYPE
5133           && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5134           && useless_type_conversion_p (type, TREE_TYPE (optype)))
5136          tree type_domain = TYPE_DOMAIN (optype);
5137          tree min_val = size_zero_node;
5138          if (type_domain && TYPE_MIN_VALUE (type_domain))
5139            min_val = TYPE_MIN_VALUE (type_domain);
5140          if (TREE_CODE (min_val) == INTEGER_CST)
5141            return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5143       /* *(foo *)&complexfoo => __real__ complexfoo */
5144       else if (TREE_CODE (optype) == COMPLEX_TYPE
5145                && useless_type_conversion_p (type, TREE_TYPE (optype)))
5146         return fold_build1 (REALPART_EXPR, type, op);
5147       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5148       else if (TREE_CODE (optype) == VECTOR_TYPE
5149                && useless_type_conversion_p (type, TREE_TYPE (optype)))
5151           tree part_width = TYPE_SIZE (type);
5152           tree index = bitsize_int (0);
5153           return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5157   /* *(p + CST) -> ...  */
5158   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5159       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5161       tree addr = TREE_OPERAND (sub, 0);
5162       tree off = TREE_OPERAND (sub, 1);
5163       tree addrtype;
5165       STRIP_NOPS (addr);
5166       addrtype = TREE_TYPE (addr);
5168       /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5169       if (TREE_CODE (addr) == ADDR_EXPR
5170           && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5171           && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5172           && tree_fits_uhwi_p (off))
5174           unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5175           tree part_width = TYPE_SIZE (type);
5176           unsigned HOST_WIDE_INT part_widthi
5177             = tree_to_shwi (part_width) / BITS_PER_UNIT;
5178           unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5179           tree index = bitsize_int (indexi);
/* Only fold if the element index stays within the vector.  */
5180           if (offset / part_widthi
5181               < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5182             return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5183                                 part_width, index);
5186       /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5187       if (TREE_CODE (addr) == ADDR_EXPR
5188           && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5189           && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5191           tree size = TYPE_SIZE_UNIT (type);
5192           if (tree_int_cst_equal (size, off))
5193             return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5196       /* *(p + CST) -> MEM_REF <p, CST>.  */
5197       if (TREE_CODE (addr) != ADDR_EXPR
5198           || DECL_P (TREE_OPERAND (addr, 0)))
5199         return fold_build2 (MEM_REF, type,
5200                             addr,
5201                             wide_int_to_tree (ptype, off));
5204   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5205   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5206       && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5207       && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5209       tree type_domain;
5210       tree min_val = size_zero_node;
5211       tree osub = sub;
5212       sub = gimple_fold_indirect_ref (sub);
5213       if (! sub)
5214         sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5215       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5216       if (type_domain && TYPE_MIN_VALUE (type_domain))
5217         min_val = TYPE_MIN_VALUE (type_domain);
5218       if (TREE_CODE (min_val) == INTEGER_CST)
5219         return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5222   return NULL_TREE;
5225 /* Return true if CODE is an operation that when operating on signed
5226    integer types involves undefined behavior on overflow and the
5227    operation can be expressed with unsigned arithmetic.  */
5229 bool
5230 arith_code_with_undefined_signed_overflow (tree_code code)
5232   switch (code)
5234     case PLUS_EXPR:
5235     case MINUS_EXPR:
5236     case MULT_EXPR:
5237     case NEGATE_EXPR:
5238     case POINTER_PLUS_EXPR:
5239       return true;
5240     default:
5241       return false;
5245 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5246    operation that can be transformed to unsigned arithmetic by converting
5247    its operand, carrying out the operation in the corresponding unsigned
5248    type and converting the result back to the original type.
5250    Returns a sequence of statements that replace STMT and also contain
5251    a modified form of STMT itself.  */
5253 gimple_seq
5254 rewrite_to_defined_overflow (gimple stmt)
5256   if (dump_file && (dump_flags & TDF_DETAILS))
5258       fprintf (dump_file, "rewriting stmt with undefined signed "
5259                "overflow ");
5260       print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5263   tree lhs = gimple_assign_lhs (stmt);
5264   tree type = unsigned_type_for (TREE_TYPE (lhs));
5265   gimple_seq stmts = NULL;
/* Convert each operand to the unsigned type, collecting any statements
   the conversion requires.  */
5266   for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
5268       gimple_seq stmts2 = NULL;
5269       gimple_set_op (stmt, i,
5270                      force_gimple_operand (fold_convert (type,
5271                                                          gimple_op (stmt, i)),
5272                                            &stmts2, true, NULL_TREE));
5273       gimple_seq_add_seq (&stmts, stmts2);
/* Carry out the operation in the unsigned type; POINTER_PLUS has no
   unsigned counterpart, so it becomes a plain PLUS.  */
5275   gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
5276   if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
5277     gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
5278   gimple_seq_add_stmt (&stmts, stmt);
/* Convert the unsigned result back to the original LHS and its type.  */
5279   gimple cvt = gimple_build_assign_with_ops
5280       (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
5281   gimple_seq_add_stmt (&stmts, cvt);
5283   return stmts;