2014-08-01 Richard Biener <rguenther@suse.de>
[official-gcc.git] / gcc / gimple-fold.c
blob2e0f2bff4388e0a2107391f1cb7cdebbc5460223
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "stringpool.h"
27 #include "expr.h"
28 #include "stmt.h"
29 #include "stor-layout.h"
30 #include "flags.h"
31 #include "function.h"
32 #include "dumpfile.h"
33 #include "bitmap.h"
34 #include "basic-block.h"
35 #include "tree-ssa-alias.h"
36 #include "internal-fn.h"
37 #include "gimple-fold.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimple-ssa.h"
44 #include "tree-ssanames.h"
45 #include "tree-into-ssa.h"
46 #include "tree-dfa.h"
47 #include "tree-ssa.h"
48 #include "tree-ssa-propagate.h"
49 #include "target.h"
50 #include "ipa-utils.h"
51 #include "gimple-pretty-print.h"
52 #include "tree-ssa-address.h"
53 #include "langhooks.h"
54 #include "gimplify-me.h"
55 #include "dbgcnt.h"
56 #include "builtins.h"
58 /* Return true when DECL can be referenced from current unit.
59 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
60 We can get declarations that are not possible to reference for various
61 reasons:
63 1) When analyzing C++ virtual tables.
64 C++ virtual tables do have known constructors even
65 when they are keyed to other compilation unit.
66 Those tables can contain pointers to methods and vars
67 in other units. Those methods have both STATIC and EXTERNAL
68 set.
69 2) In WHOPR mode devirtualization might lead to reference
70 to method that was partitioned elsehwere.
71 In this case we have static VAR_DECL or FUNCTION_DECL
72 that has no corresponding callgraph/varpool node
73 declaring the body.
74 3) COMDAT functions referred by external vtables that
75 we devirtualize only during final compilation stage.
76 At this time we already decided that we will not output
77 the function body and thus we can't reference the symbol
78 directly. */
80 static bool
81 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
83 varpool_node *vnode;
84 struct cgraph_node *node;
85 symtab_node *snode;
87 if (DECL_ABSTRACT (decl))
88 return false;
90 /* We are concerned only about static/external vars and functions. */
91 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
92 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
93 return true;
95 /* Static objects can be referred only if they was not optimized out yet. */
96 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
98 /* Before we start optimizing unreachable code we can be sure all
99 static objects are defined. */
100 if (cgraph_function_flags_ready)
101 return true;
102 snode = symtab_node::get (decl);
103 if (!snode || !snode->definition)
104 return false;
105 node = dyn_cast <cgraph_node *> (snode);
106 return !node || !node->global.inlined_to;
109 /* We will later output the initializer, so we can refer to it.
110 So we are concerned only when DECL comes from initializer of
111 external var or var that has been optimized out. */
112 if (!from_decl
113 || TREE_CODE (from_decl) != VAR_DECL
114 || (!DECL_EXTERNAL (from_decl)
115 && (vnode = varpool_node::get (from_decl)) != NULL
116 && vnode->definition)
117 || (flag_ltrans
118 && (vnode = varpool_node::get (from_decl)) != NULL
119 && vnode->in_other_partition))
120 return true;
121 /* We are folding reference from external vtable. The vtable may reffer
122 to a symbol keyed to other compilation unit. The other compilation
123 unit may be in separate DSO and the symbol may be hidden. */
124 if (DECL_VISIBILITY_SPECIFIED (decl)
125 && DECL_EXTERNAL (decl)
126 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
127 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
128 return false;
129 /* When function is public, we always can introduce new reference.
130 Exception are the COMDAT functions where introducing a direct
131 reference imply need to include function body in the curren tunit. */
132 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
133 return true;
134 /* We have COMDAT. We are going to check if we still have definition
135 or if the definition is going to be output in other partition.
136 Bypass this when gimplifying; all needed functions will be produced.
138 As observed in PR20991 for already optimized out comdat virtual functions
139 it may be tempting to not necessarily give up because the copy will be
140 output elsewhere when corresponding vtable is output.
141 This is however not possible - ABI specify that COMDATs are output in
142 units where they are used and when the other unit was compiled with LTO
143 it is possible that vtable was kept public while the function itself
144 was privatized. */
145 if (!cgraph_function_flags_ready)
146 return true;
148 snode = symtab_node::get (decl);
149 if (!snode
150 || ((!snode->definition || DECL_EXTERNAL (decl))
151 && (!snode->in_other_partition
152 || (!snode->forced_by_abi && !snode->force_output))))
153 return false;
154 node = dyn_cast <cgraph_node *> (snode);
155 return !node || !node->global.inlined_to;
158 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
159 acceptable form for is_gimple_min_invariant.
160 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
162 tree
163 canonicalize_constructor_val (tree cval, tree from_decl)
165 tree orig_cval = cval;
166 STRIP_NOPS (cval);
167 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
168 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
170 tree ptr = TREE_OPERAND (cval, 0);
171 if (is_gimple_min_invariant (ptr))
172 cval = build1_loc (EXPR_LOCATION (cval),
173 ADDR_EXPR, TREE_TYPE (ptr),
174 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
175 ptr,
176 fold_convert (ptr_type_node,
177 TREE_OPERAND (cval, 1))));
179 if (TREE_CODE (cval) == ADDR_EXPR)
181 tree base = NULL_TREE;
182 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
184 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
185 if (base)
186 TREE_OPERAND (cval, 0) = base;
188 else
189 base = get_base_address (TREE_OPERAND (cval, 0));
190 if (!base)
191 return NULL_TREE;
193 if ((TREE_CODE (base) == VAR_DECL
194 || TREE_CODE (base) == FUNCTION_DECL)
195 && !can_refer_decl_in_current_unit_p (base, from_decl))
196 return NULL_TREE;
197 if (TREE_CODE (base) == VAR_DECL)
198 TREE_ADDRESSABLE (base) = 1;
199 else if (TREE_CODE (base) == FUNCTION_DECL)
201 /* Make sure we create a cgraph node for functions we'll reference.
202 They can be non-existent if the reference comes from an entry
203 of an external vtable for example. */
204 cgraph_node::get_create (base);
206 /* Fixup types in global initializers. */
207 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
208 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
210 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
211 cval = fold_convert (TREE_TYPE (orig_cval), cval);
212 return cval;
214 if (TREE_OVERFLOW_P (cval))
215 return drop_tree_overflow (cval);
216 return orig_cval;
219 /* If SYM is a constant variable with known value, return the value.
220 NULL_TREE is returned otherwise. */
222 tree
223 get_symbol_constant_value (tree sym)
225 tree val = ctor_for_folding (sym);
226 if (val != error_mark_node)
228 if (val)
230 val = canonicalize_constructor_val (unshare_expr (val), sym);
231 if (val && is_gimple_min_invariant (val))
232 return val;
233 else
234 return NULL_TREE;
236 /* Variables declared 'const' without an initializer
237 have zero as the initializer if they may not be
238 overridden at link or run time. */
239 if (!val
240 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
241 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
242 return build_zero_cst (TREE_TYPE (sym));
245 return NULL_TREE;
250 /* Subroutine of fold_stmt. We perform several simplifications of the
251 memory reference tree EXPR and make sure to re-gimplify them properly
252 after propagation of constant addresses. IS_LHS is true if the
253 reference is supposed to be an lvalue. */
255 static tree
256 maybe_fold_reference (tree expr, bool is_lhs)
258 tree *t = &expr;
259 tree result;
261 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
262 || TREE_CODE (expr) == REALPART_EXPR
263 || TREE_CODE (expr) == IMAGPART_EXPR)
264 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
265 return fold_unary_loc (EXPR_LOCATION (expr),
266 TREE_CODE (expr),
267 TREE_TYPE (expr),
268 TREE_OPERAND (expr, 0));
269 else if (TREE_CODE (expr) == BIT_FIELD_REF
270 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
271 return fold_ternary_loc (EXPR_LOCATION (expr),
272 TREE_CODE (expr),
273 TREE_TYPE (expr),
274 TREE_OPERAND (expr, 0),
275 TREE_OPERAND (expr, 1),
276 TREE_OPERAND (expr, 2));
278 while (handled_component_p (*t))
279 t = &TREE_OPERAND (*t, 0);
281 /* Canonicalize MEM_REFs invariant address operand. Do this first
282 to avoid feeding non-canonical MEM_REFs elsewhere. */
283 if (TREE_CODE (*t) == MEM_REF
284 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
286 bool volatile_p = TREE_THIS_VOLATILE (*t);
287 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
288 TREE_OPERAND (*t, 0),
289 TREE_OPERAND (*t, 1));
290 if (tem)
292 TREE_THIS_VOLATILE (tem) = volatile_p;
293 *t = tem;
294 tem = maybe_fold_reference (expr, is_lhs);
295 if (tem)
296 return tem;
297 return expr;
301 if (!is_lhs
302 && (result = fold_const_aggregate_ref (expr))
303 && is_gimple_min_invariant (result))
304 return result;
306 /* Fold back MEM_REFs to reference trees. */
307 if (TREE_CODE (*t) == MEM_REF
308 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
309 && integer_zerop (TREE_OPERAND (*t, 1))
310 && (TREE_THIS_VOLATILE (*t)
311 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
312 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
313 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
314 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
315 /* We have to look out here to not drop a required conversion
316 from the rhs to the lhs if is_lhs, but we don't have the
317 rhs here to verify that. Thus require strict type
318 compatibility. */
319 && types_compatible_p (TREE_TYPE (*t),
320 TREE_TYPE (TREE_OPERAND
321 (TREE_OPERAND (*t, 0), 0))))
323 tree tem;
324 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
325 tem = maybe_fold_reference (expr, is_lhs);
326 if (tem)
327 return tem;
328 return expr;
330 else if (TREE_CODE (*t) == TARGET_MEM_REF)
332 tree tem = maybe_fold_tmr (*t);
333 if (tem)
335 *t = tem;
336 tem = maybe_fold_reference (expr, is_lhs);
337 if (tem)
338 return tem;
339 return expr;
343 return NULL_TREE;
347 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
348 replacement rhs for the statement or NULL_TREE if no simplification
349 could be made. It is assumed that the operands have been previously
350 folded. */
352 static tree
353 fold_gimple_assign (gimple_stmt_iterator *si)
355 gimple stmt = gsi_stmt (*si);
356 enum tree_code subcode = gimple_assign_rhs_code (stmt);
357 location_t loc = gimple_location (stmt);
359 tree result = NULL_TREE;
361 switch (get_gimple_rhs_class (subcode))
363 case GIMPLE_SINGLE_RHS:
365 tree rhs = gimple_assign_rhs1 (stmt);
367 if (REFERENCE_CLASS_P (rhs))
368 return maybe_fold_reference (rhs, false);
370 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
372 tree val = OBJ_TYPE_REF_EXPR (rhs);
373 if (is_gimple_min_invariant (val))
374 return val;
375 else if (flag_devirtualize && virtual_method_call_p (rhs))
377 bool final;
378 vec <cgraph_node *>targets
379 = possible_polymorphic_call_targets (rhs, stmt, &final);
380 if (final && targets.length () <= 1 && dbg_cnt (devirt))
382 tree fndecl;
384 if (targets.length () == 1)
385 fndecl = targets[0]->decl;
386 else
387 fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
388 if (dump_enabled_p ())
390 location_t loc = gimple_location_safe (stmt);
391 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
392 "resolving virtual function address "
393 "reference to function %s\n",
394 targets.length () == 1
395 ? targets[0]->name ()
396 : "__builtin_unreachable");
398 val = fold_convert (TREE_TYPE (val),
399 build_fold_addr_expr_loc (loc, fndecl));
400 STRIP_USELESS_TYPE_CONVERSION (val);
401 return val;
406 else if (TREE_CODE (rhs) == ADDR_EXPR)
408 tree ref = TREE_OPERAND (rhs, 0);
409 tree tem = maybe_fold_reference (ref, true);
410 if (tem
411 && TREE_CODE (tem) == MEM_REF
412 && integer_zerop (TREE_OPERAND (tem, 1)))
413 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
414 else if (tem)
415 result = fold_convert (TREE_TYPE (rhs),
416 build_fold_addr_expr_loc (loc, tem));
417 else if (TREE_CODE (ref) == MEM_REF
418 && integer_zerop (TREE_OPERAND (ref, 1)))
419 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
422 else if (TREE_CODE (rhs) == CONSTRUCTOR
423 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
424 && (CONSTRUCTOR_NELTS (rhs)
425 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
427 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
428 unsigned i;
429 tree val;
431 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
432 if (TREE_CODE (val) != INTEGER_CST
433 && TREE_CODE (val) != REAL_CST
434 && TREE_CODE (val) != FIXED_CST)
435 return NULL_TREE;
437 return build_vector_from_ctor (TREE_TYPE (rhs),
438 CONSTRUCTOR_ELTS (rhs));
441 else if (DECL_P (rhs))
442 return get_symbol_constant_value (rhs);
444 /* If we couldn't fold the RHS, hand over to the generic
445 fold routines. */
446 if (result == NULL_TREE)
447 result = fold (rhs);
449 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
450 that may have been added by fold, and "useless" type
451 conversions that might now be apparent due to propagation. */
452 STRIP_USELESS_TYPE_CONVERSION (result);
454 if (result != rhs && valid_gimple_rhs_p (result))
455 return result;
457 return NULL_TREE;
459 break;
461 case GIMPLE_UNARY_RHS:
463 tree rhs = gimple_assign_rhs1 (stmt);
465 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
466 if (result)
468 /* If the operation was a conversion do _not_ mark a
469 resulting constant with TREE_OVERFLOW if the original
470 constant was not. These conversions have implementation
471 defined behavior and retaining the TREE_OVERFLOW flag
472 here would confuse later passes such as VRP. */
473 if (CONVERT_EXPR_CODE_P (subcode)
474 && TREE_CODE (result) == INTEGER_CST
475 && TREE_CODE (rhs) == INTEGER_CST)
476 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
478 STRIP_USELESS_TYPE_CONVERSION (result);
479 if (valid_gimple_rhs_p (result))
480 return result;
483 break;
485 case GIMPLE_BINARY_RHS:
486 /* Try to canonicalize for boolean-typed X the comparisons
487 X == 0, X == 1, X != 0, and X != 1. */
488 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
489 || gimple_assign_rhs_code (stmt) == NE_EXPR)
491 tree lhs = gimple_assign_lhs (stmt);
492 tree op1 = gimple_assign_rhs1 (stmt);
493 tree op2 = gimple_assign_rhs2 (stmt);
494 tree type = TREE_TYPE (op1);
496 /* Check whether the comparison operands are of the same boolean
497 type as the result type is.
498 Check that second operand is an integer-constant with value
499 one or zero. */
500 if (TREE_CODE (op2) == INTEGER_CST
501 && (integer_zerop (op2) || integer_onep (op2))
502 && useless_type_conversion_p (TREE_TYPE (lhs), type))
504 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
505 bool is_logical_not = false;
507 /* X == 0 and X != 1 is a logical-not.of X
508 X == 1 and X != 0 is X */
509 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
510 || (cmp_code == NE_EXPR && integer_onep (op2)))
511 is_logical_not = true;
513 if (is_logical_not == false)
514 result = op1;
515 /* Only for one-bit precision typed X the transformation
516 !X -> ~X is valied. */
517 else if (TYPE_PRECISION (type) == 1)
518 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
519 type, op1);
520 /* Otherwise we use !X -> X ^ 1. */
521 else
522 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
523 type, op1, build_int_cst (type, 1));
528 if (!result)
529 result = fold_binary_loc (loc, subcode,
530 TREE_TYPE (gimple_assign_lhs (stmt)),
531 gimple_assign_rhs1 (stmt),
532 gimple_assign_rhs2 (stmt));
534 if (result)
536 STRIP_USELESS_TYPE_CONVERSION (result);
537 if (valid_gimple_rhs_p (result))
538 return result;
540 break;
542 case GIMPLE_TERNARY_RHS:
543 /* Try to fold a conditional expression. */
544 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
546 tree op0 = gimple_assign_rhs1 (stmt);
547 tree tem;
548 bool set = false;
549 location_t cond_loc = gimple_location (stmt);
551 if (COMPARISON_CLASS_P (op0))
553 fold_defer_overflow_warnings ();
554 tem = fold_binary_loc (cond_loc,
555 TREE_CODE (op0), TREE_TYPE (op0),
556 TREE_OPERAND (op0, 0),
557 TREE_OPERAND (op0, 1));
558 /* This is actually a conditional expression, not a GIMPLE
559 conditional statement, however, the valid_gimple_rhs_p
560 test still applies. */
561 set = (tem && is_gimple_condexpr (tem)
562 && valid_gimple_rhs_p (tem));
563 fold_undefer_overflow_warnings (set, stmt, 0);
565 else if (is_gimple_min_invariant (op0))
567 tem = op0;
568 set = true;
570 else
571 return NULL_TREE;
573 if (set)
574 result = fold_build3_loc (cond_loc, COND_EXPR,
575 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
576 gimple_assign_rhs2 (stmt),
577 gimple_assign_rhs3 (stmt));
580 if (!result)
581 result = fold_ternary_loc (loc, subcode,
582 TREE_TYPE (gimple_assign_lhs (stmt)),
583 gimple_assign_rhs1 (stmt),
584 gimple_assign_rhs2 (stmt),
585 gimple_assign_rhs3 (stmt));
587 if (result)
589 STRIP_USELESS_TYPE_CONVERSION (result);
590 if (valid_gimple_rhs_p (result))
591 return result;
593 break;
595 case GIMPLE_INVALID_RHS:
596 gcc_unreachable ();
599 return NULL_TREE;
602 /* Attempt to fold a conditional statement. Return true if any changes were
603 made. We only attempt to fold the condition expression, and do not perform
604 any transformation that would require alteration of the cfg. It is
605 assumed that the operands have been previously folded. */
607 static bool
608 fold_gimple_cond (gimple stmt)
610 tree result = fold_binary_loc (gimple_location (stmt),
611 gimple_cond_code (stmt),
612 boolean_type_node,
613 gimple_cond_lhs (stmt),
614 gimple_cond_rhs (stmt));
616 if (result)
618 STRIP_USELESS_TYPE_CONVERSION (result);
619 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
621 gimple_cond_set_condition_from_tree (stmt, result);
622 return true;
626 return false;
629 /* Convert EXPR into a GIMPLE value suitable for substitution on the
630 RHS of an assignment. Insert the necessary statements before
631 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
632 is replaced. If the call is expected to produces a result, then it
633 is replaced by an assignment of the new RHS to the result variable.
634 If the result is to be ignored, then the call is replaced by a
635 GIMPLE_NOP. A proper VDEF chain is retained by making the first
636 VUSE and the last VDEF of the whole sequence be the same as the replaced
637 statement and using new SSA names for stores in between. */
639 void
640 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
642 tree lhs;
643 gimple stmt, new_stmt;
644 gimple_stmt_iterator i;
645 gimple_seq stmts = NULL;
646 gimple laststore;
647 tree reaching_vuse;
649 stmt = gsi_stmt (*si_p);
651 gcc_assert (is_gimple_call (stmt));
653 push_gimplify_context (gimple_in_ssa_p (cfun));
655 lhs = gimple_call_lhs (stmt);
656 if (lhs == NULL_TREE)
658 gimplify_and_add (expr, &stmts);
659 /* We can end up with folding a memcpy of an empty class assignment
660 which gets optimized away by C++ gimplification. */
661 if (gimple_seq_empty_p (stmts))
663 pop_gimplify_context (NULL);
664 if (gimple_in_ssa_p (cfun))
666 unlink_stmt_vdef (stmt);
667 release_defs (stmt);
669 gsi_replace (si_p, gimple_build_nop (), true);
670 return;
673 else
675 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
676 new_stmt = gimple_build_assign (lhs, tmp);
677 i = gsi_last (stmts);
678 gsi_insert_after_without_update (&i, new_stmt,
679 GSI_CONTINUE_LINKING);
682 pop_gimplify_context (NULL);
684 if (gimple_has_location (stmt))
685 annotate_all_with_location (stmts, gimple_location (stmt));
687 /* First iterate over the replacement statements backward, assigning
688 virtual operands to their defining statements. */
689 laststore = NULL;
690 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
692 new_stmt = gsi_stmt (i);
693 if ((gimple_assign_single_p (new_stmt)
694 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
695 || (is_gimple_call (new_stmt)
696 && (gimple_call_flags (new_stmt)
697 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
699 tree vdef;
700 if (!laststore)
701 vdef = gimple_vdef (stmt);
702 else
703 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
704 gimple_set_vdef (new_stmt, vdef);
705 if (vdef && TREE_CODE (vdef) == SSA_NAME)
706 SSA_NAME_DEF_STMT (vdef) = new_stmt;
707 laststore = new_stmt;
711 /* Second iterate over the statements forward, assigning virtual
712 operands to their uses. */
713 reaching_vuse = gimple_vuse (stmt);
714 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
716 new_stmt = gsi_stmt (i);
717 /* If the new statement possibly has a VUSE, update it with exact SSA
718 name we know will reach this one. */
719 if (gimple_has_mem_ops (new_stmt))
720 gimple_set_vuse (new_stmt, reaching_vuse);
721 gimple_set_modified (new_stmt, true);
722 if (gimple_vdef (new_stmt))
723 reaching_vuse = gimple_vdef (new_stmt);
726 /* If the new sequence does not do a store release the virtual
727 definition of the original statement. */
728 if (reaching_vuse
729 && reaching_vuse == gimple_vuse (stmt))
731 tree vdef = gimple_vdef (stmt);
732 if (vdef
733 && TREE_CODE (vdef) == SSA_NAME)
735 unlink_stmt_vdef (stmt);
736 release_ssa_name (vdef);
740 /* Finally replace the original statement with the sequence. */
741 gsi_replace_with_seq (si_p, stmts, false);
744 /* Return the string length, maximum string length or maximum value of
745 ARG in LENGTH.
746 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
747 is not NULL and, for TYPE == 0, its value is not equal to the length
748 we determine or if we are unable to determine the length or value,
749 return false. VISITED is a bitmap of visited variables.
750 TYPE is 0 if string length should be returned, 1 for maximum string
751 length and 2 for maximum value ARG can have. */
753 static bool
754 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
756 tree var, val;
757 gimple def_stmt;
759 if (TREE_CODE (arg) != SSA_NAME)
761 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
762 if (TREE_CODE (arg) == ADDR_EXPR
763 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
764 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
766 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
767 if (TREE_CODE (aop0) == INDIRECT_REF
768 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
769 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
770 length, visited, type);
773 if (type == 2)
775 val = arg;
776 if (TREE_CODE (val) != INTEGER_CST
777 || tree_int_cst_sgn (val) < 0)
778 return false;
780 else
781 val = c_strlen (arg, 1);
782 if (!val)
783 return false;
785 if (*length)
787 if (type > 0)
789 if (TREE_CODE (*length) != INTEGER_CST
790 || TREE_CODE (val) != INTEGER_CST)
791 return false;
793 if (tree_int_cst_lt (*length, val))
794 *length = val;
795 return true;
797 else if (simple_cst_equal (val, *length) != 1)
798 return false;
801 *length = val;
802 return true;
805 /* If ARG is registered for SSA update we cannot look at its defining
806 statement. */
807 if (name_registered_for_update_p (arg))
808 return false;
810 /* If we were already here, break the infinite cycle. */
811 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
812 return true;
814 var = arg;
815 def_stmt = SSA_NAME_DEF_STMT (var);
817 switch (gimple_code (def_stmt))
819 case GIMPLE_ASSIGN:
820 /* The RHS of the statement defining VAR must either have a
821 constant length or come from another SSA_NAME with a constant
822 length. */
823 if (gimple_assign_single_p (def_stmt)
824 || gimple_assign_unary_nop_p (def_stmt))
826 tree rhs = gimple_assign_rhs1 (def_stmt);
827 return get_maxval_strlen (rhs, length, visited, type);
829 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
831 tree op2 = gimple_assign_rhs2 (def_stmt);
832 tree op3 = gimple_assign_rhs3 (def_stmt);
833 return get_maxval_strlen (op2, length, visited, type)
834 && get_maxval_strlen (op3, length, visited, type);
836 return false;
838 case GIMPLE_PHI:
840 /* All the arguments of the PHI node must have the same constant
841 length. */
842 unsigned i;
844 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
846 tree arg = gimple_phi_arg (def_stmt, i)->def;
848 /* If this PHI has itself as an argument, we cannot
849 determine the string length of this argument. However,
850 if we can find a constant string length for the other
851 PHI args then we can still be sure that this is a
852 constant string length. So be optimistic and just
853 continue with the next argument. */
854 if (arg == gimple_phi_result (def_stmt))
855 continue;
857 if (!get_maxval_strlen (arg, length, visited, type))
858 return false;
861 return true;
863 default:
864 return false;
869 /* Fold builtin call in statement STMT. Returns a simplified tree.
870 We may return a non-constant expression, including another call
871 to a different function and with different arguments, e.g.,
872 substituting memcpy for strcpy when the string length is known.
873 Note that some builtins expand into inline code that may not
874 be valid in GIMPLE. Callers must take care. */
876 tree
877 gimple_fold_builtin (gimple stmt)
879 tree result, val[3];
880 tree callee, a;
881 int arg_idx, type;
882 bitmap visited;
883 bool ignore;
884 int nargs;
885 location_t loc = gimple_location (stmt);
887 ignore = (gimple_call_lhs (stmt) == NULL);
889 /* First try the generic builtin folder. If that succeeds, return the
890 result directly. */
891 result = fold_call_stmt (stmt, ignore);
892 if (result)
894 if (ignore)
895 STRIP_NOPS (result);
896 else
897 result = fold_convert (gimple_call_return_type (stmt), result);
898 return result;
901 /* Ignore MD builtins. */
902 callee = gimple_call_fndecl (stmt);
903 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
904 return NULL_TREE;
906 /* Give up for always_inline inline builtins until they are
907 inlined. */
908 if (avoid_folding_inline_builtin (callee))
909 return NULL_TREE;
911 /* If the builtin could not be folded, and it has no argument list,
912 we're done. */
913 nargs = gimple_call_num_args (stmt);
914 if (nargs == 0)
915 return NULL_TREE;
917 /* Limit the work only for builtins we know how to simplify. */
918 switch (DECL_FUNCTION_CODE (callee))
920 case BUILT_IN_STRLEN:
921 case BUILT_IN_FPUTS:
922 case BUILT_IN_FPUTS_UNLOCKED:
923 arg_idx = 0;
924 type = 0;
925 break;
926 case BUILT_IN_STRCPY:
927 case BUILT_IN_STRNCPY:
928 case BUILT_IN_STRCAT:
929 arg_idx = 1;
930 type = 0;
931 break;
932 case BUILT_IN_MEMCPY_CHK:
933 case BUILT_IN_MEMPCPY_CHK:
934 case BUILT_IN_MEMMOVE_CHK:
935 case BUILT_IN_MEMSET_CHK:
936 case BUILT_IN_STRNCPY_CHK:
937 case BUILT_IN_STPNCPY_CHK:
938 arg_idx = 2;
939 type = 2;
940 break;
941 case BUILT_IN_STRCPY_CHK:
942 case BUILT_IN_STPCPY_CHK:
943 arg_idx = 1;
944 type = 1;
945 break;
946 case BUILT_IN_SNPRINTF_CHK:
947 case BUILT_IN_VSNPRINTF_CHK:
948 arg_idx = 1;
949 type = 2;
950 break;
951 default:
952 return NULL_TREE;
955 if (arg_idx >= nargs)
956 return NULL_TREE;
958 /* Try to use the dataflow information gathered by the CCP process. */
959 visited = BITMAP_ALLOC (NULL);
960 bitmap_clear (visited);
962 memset (val, 0, sizeof (val));
963 a = gimple_call_arg (stmt, arg_idx);
964 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
965 val[arg_idx] = NULL_TREE;
967 BITMAP_FREE (visited);
969 result = NULL_TREE;
970 switch (DECL_FUNCTION_CODE (callee))
972 case BUILT_IN_STRLEN:
973 if (val[0] && nargs == 1)
975 tree new_val =
976 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
978 /* If the result is not a valid gimple value, or not a cast
979 of a valid gimple value, then we cannot use the result. */
980 if (is_gimple_val (new_val)
981 || (CONVERT_EXPR_P (new_val)
982 && is_gimple_val (TREE_OPERAND (new_val, 0))))
983 return new_val;
985 break;
987 case BUILT_IN_STRCPY:
988 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
989 result = fold_builtin_strcpy (loc, callee,
990 gimple_call_arg (stmt, 0),
991 gimple_call_arg (stmt, 1),
992 val[1]);
993 break;
995 case BUILT_IN_STRNCPY:
996 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
997 result = fold_builtin_strncpy (loc, callee,
998 gimple_call_arg (stmt, 0),
999 gimple_call_arg (stmt, 1),
1000 gimple_call_arg (stmt, 2),
1001 val[1]);
1002 break;
1004 case BUILT_IN_STRCAT:
1005 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1006 result = fold_builtin_strcat (loc, gimple_call_arg (stmt, 0),
1007 gimple_call_arg (stmt, 1),
1008 val[1]);
1009 break;
1011 case BUILT_IN_FPUTS:
1012 if (nargs == 2)
1013 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1014 gimple_call_arg (stmt, 1),
1015 ignore, false, val[0]);
1016 break;
1018 case BUILT_IN_FPUTS_UNLOCKED:
1019 if (nargs == 2)
1020 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1021 gimple_call_arg (stmt, 1),
1022 ignore, true, val[0]);
1023 break;
1025 case BUILT_IN_MEMCPY_CHK:
1026 case BUILT_IN_MEMPCPY_CHK:
1027 case BUILT_IN_MEMMOVE_CHK:
1028 case BUILT_IN_MEMSET_CHK:
1029 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1030 result = fold_builtin_memory_chk (loc, callee,
1031 gimple_call_arg (stmt, 0),
1032 gimple_call_arg (stmt, 1),
1033 gimple_call_arg (stmt, 2),
1034 gimple_call_arg (stmt, 3),
1035 val[2], ignore,
1036 DECL_FUNCTION_CODE (callee));
1037 break;
1039 case BUILT_IN_STRCPY_CHK:
1040 case BUILT_IN_STPCPY_CHK:
1041 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1042 result = fold_builtin_stxcpy_chk (loc, callee,
1043 gimple_call_arg (stmt, 0),
1044 gimple_call_arg (stmt, 1),
1045 gimple_call_arg (stmt, 2),
1046 val[1], ignore,
1047 DECL_FUNCTION_CODE (callee));
1048 break;
1050 case BUILT_IN_STRNCPY_CHK:
1051 case BUILT_IN_STPNCPY_CHK:
1052 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1053 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
1054 gimple_call_arg (stmt, 1),
1055 gimple_call_arg (stmt, 2),
1056 gimple_call_arg (stmt, 3),
1057 val[2], ignore,
1058 DECL_FUNCTION_CODE (callee));
1059 break;
1061 case BUILT_IN_SNPRINTF_CHK:
1062 case BUILT_IN_VSNPRINTF_CHK:
1063 if (val[1] && is_gimple_val (val[1]))
1064 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1065 DECL_FUNCTION_CODE (callee));
1066 break;
1068 default:
1069 gcc_unreachable ();
1072 if (result && ignore)
1073 result = fold_ignored_result (result);
1074 return result;
1078 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1079 The statement may be replaced by another statement, e.g., if the call
1080 simplifies to a constant value. Return true if any changes were made.
1081 It is assumed that the operands have been previously folded. */
1083 static bool
1084 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1086 gimple stmt = gsi_stmt (*gsi);
1087 tree callee;
1088 bool changed = false;
1089 unsigned i;
1091 /* Fold *& in call arguments. */
1092 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1093 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1095 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1096 if (tmp)
1098 gimple_call_set_arg (stmt, i, tmp);
1099 changed = true;
1103 /* Check for virtual calls that became direct calls. */
1104 callee = gimple_call_fn (stmt);
1105 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1107 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1109 if (dump_file && virtual_method_call_p (callee)
1110 && !possible_polymorphic_call_target_p
1111 (callee, cgraph_node::get (gimple_call_addr_fndecl
1112 (OBJ_TYPE_REF_EXPR (callee)))))
1114 fprintf (dump_file,
1115 "Type inheritance inconsistent devirtualization of ");
1116 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1117 fprintf (dump_file, " to ");
1118 print_generic_expr (dump_file, callee, TDF_SLIM);
1119 fprintf (dump_file, "\n");
1122 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1123 changed = true;
1125 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
1127 bool final;
1128 vec <cgraph_node *>targets
1129 = possible_polymorphic_call_targets (callee, stmt, &final);
1130 if (final && targets.length () <= 1 && dbg_cnt (devirt))
1132 tree lhs = gimple_call_lhs (stmt);
1133 if (dump_enabled_p ())
1135 location_t loc = gimple_location_safe (stmt);
1136 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
1137 "folding virtual function call to %s\n",
1138 targets.length () == 1
1139 ? targets[0]->name ()
1140 : "__builtin_unreachable");
1142 if (targets.length () == 1)
1144 gimple_call_set_fndecl (stmt, targets[0]->decl);
1145 changed = true;
1146 /* If the call becomes noreturn, remove the lhs. */
1147 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
1149 if (TREE_CODE (lhs) == SSA_NAME)
1151 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
1152 tree def = get_or_create_ssa_default_def (cfun, var);
1153 gimple new_stmt = gimple_build_assign (lhs, def);
1154 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1156 gimple_call_set_lhs (stmt, NULL_TREE);
1159 else
1161 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
1162 gimple new_stmt = gimple_build_call (fndecl, 0);
1163 gimple_set_location (new_stmt, gimple_location (stmt));
1164 if (lhs && TREE_CODE (lhs) == SSA_NAME)
1166 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
1167 tree def = get_or_create_ssa_default_def (cfun, var);
1169 /* To satisfy condition for
1170 cgraph_update_edges_for_call_stmt_node,
1171 we need to preserve GIMPLE_CALL statement
1172 at position of GSI iterator. */
1173 update_call_from_tree (gsi, def);
1174 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
1176 else
1177 gsi_replace (gsi, new_stmt, true);
1178 return true;
1184 if (inplace)
1185 return changed;
1187 /* Check for builtins that CCP can handle using information not
1188 available in the generic fold routines. */
1189 if (gimple_call_builtin_p (stmt))
1191 tree result = gimple_fold_builtin (stmt);
1192 if (result)
1194 if (!update_call_from_tree (gsi, result))
1195 gimplify_and_update_call_from_tree (gsi, result);
1196 changed = true;
1198 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
1199 changed |= targetm.gimple_fold_builtin (gsi);
1201 else if (gimple_call_internal_p (stmt))
1203 enum tree_code subcode = ERROR_MARK;
1204 tree result = NULL_TREE;
1205 switch (gimple_call_internal_fn (stmt))
1207 case IFN_BUILTIN_EXPECT:
1208 result = fold_builtin_expect (gimple_location (stmt),
1209 gimple_call_arg (stmt, 0),
1210 gimple_call_arg (stmt, 1),
1211 gimple_call_arg (stmt, 2));
1212 break;
1213 case IFN_UBSAN_CHECK_ADD:
1214 subcode = PLUS_EXPR;
1215 break;
1216 case IFN_UBSAN_CHECK_SUB:
1217 subcode = MINUS_EXPR;
1218 break;
1219 case IFN_UBSAN_CHECK_MUL:
1220 subcode = MULT_EXPR;
1221 break;
1222 default:
1223 break;
1225 if (subcode != ERROR_MARK)
1227 tree arg0 = gimple_call_arg (stmt, 0);
1228 tree arg1 = gimple_call_arg (stmt, 1);
1229 /* x = y + 0; x = y - 0; x = y * 0; */
1230 if (integer_zerop (arg1))
1231 result = subcode == MULT_EXPR
1232 ? build_zero_cst (TREE_TYPE (arg0))
1233 : arg0;
1234 /* x = 0 + y; x = 0 * y; */
1235 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1236 result = subcode == MULT_EXPR
1237 ? build_zero_cst (TREE_TYPE (arg0))
1238 : arg1;
1239 /* x = y - y; */
1240 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1241 result = build_zero_cst (TREE_TYPE (arg0));
1242 /* x = y * 1; x = 1 * y; */
1243 else if (subcode == MULT_EXPR)
1245 if (integer_onep (arg1))
1246 result = arg0;
1247 else if (integer_onep (arg0))
1248 result = arg1;
1251 if (result)
1253 if (!update_call_from_tree (gsi, result))
1254 gimplify_and_update_call_from_tree (gsi, result);
1255 changed = true;
1259 return changed;
1262 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1263 distinguishes both cases. */
1265 static bool
1266 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1268 bool changed = false;
1269 gimple stmt = gsi_stmt (*gsi);
1270 unsigned i;
1272 /* Fold the main computation performed by the statement. */
1273 switch (gimple_code (stmt))
1275 case GIMPLE_ASSIGN:
1277 unsigned old_num_ops = gimple_num_ops (stmt);
1278 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1279 tree lhs = gimple_assign_lhs (stmt);
1280 tree new_rhs;
1281 /* First canonicalize operand order. This avoids building new
1282 trees if this is the only thing fold would later do. */
1283 if ((commutative_tree_code (subcode)
1284 || commutative_ternary_tree_code (subcode))
1285 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1286 gimple_assign_rhs2 (stmt), false))
1288 tree tem = gimple_assign_rhs1 (stmt);
1289 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1290 gimple_assign_set_rhs2 (stmt, tem);
1291 changed = true;
1293 new_rhs = fold_gimple_assign (gsi);
1294 if (new_rhs
1295 && !useless_type_conversion_p (TREE_TYPE (lhs),
1296 TREE_TYPE (new_rhs)))
1297 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1298 if (new_rhs
1299 && (!inplace
1300 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1302 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1303 changed = true;
1305 break;
1308 case GIMPLE_COND:
1309 changed |= fold_gimple_cond (stmt);
1310 break;
1312 case GIMPLE_CALL:
1313 changed |= gimple_fold_call (gsi, inplace);
1314 break;
1316 case GIMPLE_ASM:
1317 /* Fold *& in asm operands. */
1319 size_t noutputs;
1320 const char **oconstraints;
1321 const char *constraint;
1322 bool allows_mem, allows_reg;
1324 noutputs = gimple_asm_noutputs (stmt);
1325 oconstraints = XALLOCAVEC (const char *, noutputs);
1327 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1329 tree link = gimple_asm_output_op (stmt, i);
1330 tree op = TREE_VALUE (link);
1331 oconstraints[i]
1332 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1333 if (REFERENCE_CLASS_P (op)
1334 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1336 TREE_VALUE (link) = op;
1337 changed = true;
1340 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1342 tree link = gimple_asm_input_op (stmt, i);
1343 tree op = TREE_VALUE (link);
1344 constraint
1345 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1346 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1347 oconstraints, &allows_mem, &allows_reg);
1348 if (REFERENCE_CLASS_P (op)
1349 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1350 != NULL_TREE)
1352 TREE_VALUE (link) = op;
1353 changed = true;
1357 break;
1359 case GIMPLE_DEBUG:
1360 if (gimple_debug_bind_p (stmt))
1362 tree val = gimple_debug_bind_get_value (stmt);
1363 if (val
1364 && REFERENCE_CLASS_P (val))
1366 tree tem = maybe_fold_reference (val, false);
1367 if (tem)
1369 gimple_debug_bind_set_value (stmt, tem);
1370 changed = true;
1373 else if (val
1374 && TREE_CODE (val) == ADDR_EXPR)
1376 tree ref = TREE_OPERAND (val, 0);
1377 tree tem = maybe_fold_reference (ref, false);
1378 if (tem)
1380 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1381 gimple_debug_bind_set_value (stmt, tem);
1382 changed = true;
1386 break;
1388 default:;
1391 stmt = gsi_stmt (*gsi);
1393 /* Fold *& on the lhs. */
1394 if (gimple_has_lhs (stmt))
1396 tree lhs = gimple_get_lhs (stmt);
1397 if (lhs && REFERENCE_CLASS_P (lhs))
1399 tree new_lhs = maybe_fold_reference (lhs, true);
1400 if (new_lhs)
1402 gimple_set_lhs (stmt, new_lhs);
1403 changed = true;
1408 return changed;
1411 /* Fold the statement pointed to by GSI. In some cases, this function may
1412 replace the whole statement with a new one. Returns true iff folding
1413 makes any changes.
1414 The statement pointed to by GSI should be in valid gimple form but may
1415 be in unfolded state as resulting from for example constant propagation
1416 which can produce *&x = 0. */
1418 bool
1419 fold_stmt (gimple_stmt_iterator *gsi)
1421 return fold_stmt_1 (gsi, false);
1424 /* Perform the minimal folding on statement *GSI. Only operations like
1425 *&x created by constant propagation are handled. The statement cannot
1426 be replaced with a new one. Return true if the statement was
1427 changed, false otherwise.
1428 The statement *GSI should be in valid gimple form but may
1429 be in unfolded state as resulting from for example constant propagation
1430 which can produce *&x = 0. */
1432 bool
1433 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1435 gimple stmt = gsi_stmt (*gsi);
1436 bool changed = fold_stmt_1 (gsi, true);
1437 gcc_assert (gsi_stmt (*gsi) == stmt);
1438 return changed;
1441 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1442 if EXPR is null or we don't know how.
1443 If non-null, the result always has boolean type. */
1445 static tree
1446 canonicalize_bool (tree expr, bool invert)
1448 if (!expr)
1449 return NULL_TREE;
1450 else if (invert)
1452 if (integer_nonzerop (expr))
1453 return boolean_false_node;
1454 else if (integer_zerop (expr))
1455 return boolean_true_node;
1456 else if (TREE_CODE (expr) == SSA_NAME)
1457 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1458 build_int_cst (TREE_TYPE (expr), 0));
1459 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1460 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1461 boolean_type_node,
1462 TREE_OPERAND (expr, 0),
1463 TREE_OPERAND (expr, 1));
1464 else
1465 return NULL_TREE;
1467 else
1469 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1470 return expr;
1471 if (integer_nonzerop (expr))
1472 return boolean_true_node;
1473 else if (integer_zerop (expr))
1474 return boolean_false_node;
1475 else if (TREE_CODE (expr) == SSA_NAME)
1476 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1477 build_int_cst (TREE_TYPE (expr), 0));
1478 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1479 return fold_build2 (TREE_CODE (expr),
1480 boolean_type_node,
1481 TREE_OPERAND (expr, 0),
1482 TREE_OPERAND (expr, 1));
1483 else
1484 return NULL_TREE;
1488 /* Check to see if a boolean expression EXPR is logically equivalent to the
1489 comparison (OP1 CODE OP2). Check for various identities involving
1490 SSA_NAMEs. */
1492 static bool
1493 same_bool_comparison_p (const_tree expr, enum tree_code code,
1494 const_tree op1, const_tree op2)
1496 gimple s;
1498 /* The obvious case. */
1499 if (TREE_CODE (expr) == code
1500 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1501 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1502 return true;
1504 /* Check for comparing (name, name != 0) and the case where expr
1505 is an SSA_NAME with a definition matching the comparison. */
1506 if (TREE_CODE (expr) == SSA_NAME
1507 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1509 if (operand_equal_p (expr, op1, 0))
1510 return ((code == NE_EXPR && integer_zerop (op2))
1511 || (code == EQ_EXPR && integer_nonzerop (op2)));
1512 s = SSA_NAME_DEF_STMT (expr);
1513 if (is_gimple_assign (s)
1514 && gimple_assign_rhs_code (s) == code
1515 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1516 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1517 return true;
1520 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1521 of name is a comparison, recurse. */
1522 if (TREE_CODE (op1) == SSA_NAME
1523 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1525 s = SSA_NAME_DEF_STMT (op1);
1526 if (is_gimple_assign (s)
1527 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1529 enum tree_code c = gimple_assign_rhs_code (s);
1530 if ((c == NE_EXPR && integer_zerop (op2))
1531 || (c == EQ_EXPR && integer_nonzerop (op2)))
1532 return same_bool_comparison_p (expr, c,
1533 gimple_assign_rhs1 (s),
1534 gimple_assign_rhs2 (s));
1535 if ((c == EQ_EXPR && integer_zerop (op2))
1536 || (c == NE_EXPR && integer_nonzerop (op2)))
1537 return same_bool_comparison_p (expr,
1538 invert_tree_comparison (c, false),
1539 gimple_assign_rhs1 (s),
1540 gimple_assign_rhs2 (s));
1543 return false;
1546 /* Check to see if two boolean expressions OP1 and OP2 are logically
1547 equivalent. */
1549 static bool
1550 same_bool_result_p (const_tree op1, const_tree op2)
1552 /* Simple cases first. */
1553 if (operand_equal_p (op1, op2, 0))
1554 return true;
1556 /* Check the cases where at least one of the operands is a comparison.
1557 These are a bit smarter than operand_equal_p in that they apply some
1558 identifies on SSA_NAMEs. */
1559 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1560 && same_bool_comparison_p (op1, TREE_CODE (op2),
1561 TREE_OPERAND (op2, 0),
1562 TREE_OPERAND (op2, 1)))
1563 return true;
1564 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1565 && same_bool_comparison_p (op2, TREE_CODE (op1),
1566 TREE_OPERAND (op1, 0),
1567 TREE_OPERAND (op1, 1)))
1568 return true;
1570 /* Default case. */
1571 return false;
1574 /* Forward declarations for some mutually recursive functions. */
1576 static tree
1577 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1578 enum tree_code code2, tree op2a, tree op2b);
1579 static tree
1580 and_var_with_comparison (tree var, bool invert,
1581 enum tree_code code2, tree op2a, tree op2b);
1582 static tree
1583 and_var_with_comparison_1 (gimple stmt,
1584 enum tree_code code2, tree op2a, tree op2b);
1585 static tree
1586 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1587 enum tree_code code2, tree op2a, tree op2b);
1588 static tree
1589 or_var_with_comparison (tree var, bool invert,
1590 enum tree_code code2, tree op2a, tree op2b);
1591 static tree
1592 or_var_with_comparison_1 (gimple stmt,
1593 enum tree_code code2, tree op2a, tree op2b);
1595 /* Helper function for and_comparisons_1: try to simplify the AND of the
1596 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1597 If INVERT is true, invert the value of the VAR before doing the AND.
1598 Return NULL_EXPR if we can't simplify this to a single expression. */
1600 static tree
1601 and_var_with_comparison (tree var, bool invert,
1602 enum tree_code code2, tree op2a, tree op2b)
1604 tree t;
1605 gimple stmt = SSA_NAME_DEF_STMT (var);
1607 /* We can only deal with variables whose definitions are assignments. */
1608 if (!is_gimple_assign (stmt))
1609 return NULL_TREE;
1611 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1612 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1613 Then we only have to consider the simpler non-inverted cases. */
1614 if (invert)
1615 t = or_var_with_comparison_1 (stmt,
1616 invert_tree_comparison (code2, false),
1617 op2a, op2b);
1618 else
1619 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1620 return canonicalize_bool (t, invert);
1623 /* Try to simplify the AND of the ssa variable defined by the assignment
1624 STMT with the comparison specified by (OP2A CODE2 OP2B).
1625 Return NULL_EXPR if we can't simplify this to a single expression. */
1627 static tree
1628 and_var_with_comparison_1 (gimple stmt,
1629 enum tree_code code2, tree op2a, tree op2b)
1631 tree var = gimple_assign_lhs (stmt);
1632 tree true_test_var = NULL_TREE;
1633 tree false_test_var = NULL_TREE;
1634 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1636 /* Check for identities like (var AND (var == 0)) => false. */
1637 if (TREE_CODE (op2a) == SSA_NAME
1638 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1640 if ((code2 == NE_EXPR && integer_zerop (op2b))
1641 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1643 true_test_var = op2a;
1644 if (var == true_test_var)
1645 return var;
1647 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1648 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1650 false_test_var = op2a;
1651 if (var == false_test_var)
1652 return boolean_false_node;
1656 /* If the definition is a comparison, recurse on it. */
1657 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1659 tree t = and_comparisons_1 (innercode,
1660 gimple_assign_rhs1 (stmt),
1661 gimple_assign_rhs2 (stmt),
1662 code2,
1663 op2a,
1664 op2b);
1665 if (t)
1666 return t;
1669 /* If the definition is an AND or OR expression, we may be able to
1670 simplify by reassociating. */
1671 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1672 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1674 tree inner1 = gimple_assign_rhs1 (stmt);
1675 tree inner2 = gimple_assign_rhs2 (stmt);
1676 gimple s;
1677 tree t;
1678 tree partial = NULL_TREE;
1679 bool is_and = (innercode == BIT_AND_EXPR);
1681 /* Check for boolean identities that don't require recursive examination
1682 of inner1/inner2:
1683 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1684 inner1 AND (inner1 OR inner2) => inner1
1685 !inner1 AND (inner1 AND inner2) => false
1686 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1687 Likewise for similar cases involving inner2. */
1688 if (inner1 == true_test_var)
1689 return (is_and ? var : inner1);
1690 else if (inner2 == true_test_var)
1691 return (is_and ? var : inner2);
1692 else if (inner1 == false_test_var)
1693 return (is_and
1694 ? boolean_false_node
1695 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1696 else if (inner2 == false_test_var)
1697 return (is_and
1698 ? boolean_false_node
1699 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1701 /* Next, redistribute/reassociate the AND across the inner tests.
1702 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1703 if (TREE_CODE (inner1) == SSA_NAME
1704 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1705 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1706 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1707 gimple_assign_rhs1 (s),
1708 gimple_assign_rhs2 (s),
1709 code2, op2a, op2b)))
1711 /* Handle the AND case, where we are reassociating:
1712 (inner1 AND inner2) AND (op2a code2 op2b)
1713 => (t AND inner2)
1714 If the partial result t is a constant, we win. Otherwise
1715 continue on to try reassociating with the other inner test. */
1716 if (is_and)
1718 if (integer_onep (t))
1719 return inner2;
1720 else if (integer_zerop (t))
1721 return boolean_false_node;
1724 /* Handle the OR case, where we are redistributing:
1725 (inner1 OR inner2) AND (op2a code2 op2b)
1726 => (t OR (inner2 AND (op2a code2 op2b))) */
1727 else if (integer_onep (t))
1728 return boolean_true_node;
1730 /* Save partial result for later. */
1731 partial = t;
1734 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1735 if (TREE_CODE (inner2) == SSA_NAME
1736 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1737 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1738 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1739 gimple_assign_rhs1 (s),
1740 gimple_assign_rhs2 (s),
1741 code2, op2a, op2b)))
1743 /* Handle the AND case, where we are reassociating:
1744 (inner1 AND inner2) AND (op2a code2 op2b)
1745 => (inner1 AND t) */
1746 if (is_and)
1748 if (integer_onep (t))
1749 return inner1;
1750 else if (integer_zerop (t))
1751 return boolean_false_node;
1752 /* If both are the same, we can apply the identity
1753 (x AND x) == x. */
1754 else if (partial && same_bool_result_p (t, partial))
1755 return t;
1758 /* Handle the OR case. where we are redistributing:
1759 (inner1 OR inner2) AND (op2a code2 op2b)
1760 => (t OR (inner1 AND (op2a code2 op2b)))
1761 => (t OR partial) */
1762 else
1764 if (integer_onep (t))
1765 return boolean_true_node;
1766 else if (partial)
1768 /* We already got a simplification for the other
1769 operand to the redistributed OR expression. The
1770 interesting case is when at least one is false.
1771 Or, if both are the same, we can apply the identity
1772 (x OR x) == x. */
1773 if (integer_zerop (partial))
1774 return t;
1775 else if (integer_zerop (t))
1776 return partial;
1777 else if (same_bool_result_p (t, partial))
1778 return t;
1783 return NULL_TREE;
1786 /* Try to simplify the AND of two comparisons defined by
1787 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1788 If this can be done without constructing an intermediate value,
1789 return the resulting tree; otherwise NULL_TREE is returned.
1790 This function is deliberately asymmetric as it recurses on SSA_DEFs
1791 in the first comparison but not the second. */
1793 static tree
1794 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1795 enum tree_code code2, tree op2a, tree op2b)
1797 tree truth_type = truth_type_for (TREE_TYPE (op1a));
1799 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1800 if (operand_equal_p (op1a, op2a, 0)
1801 && operand_equal_p (op1b, op2b, 0))
1803 /* Result will be either NULL_TREE, or a combined comparison. */
1804 tree t = combine_comparisons (UNKNOWN_LOCATION,
1805 TRUTH_ANDIF_EXPR, code1, code2,
1806 truth_type, op1a, op1b);
1807 if (t)
1808 return t;
1811 /* Likewise the swapped case of the above. */
1812 if (operand_equal_p (op1a, op2b, 0)
1813 && operand_equal_p (op1b, op2a, 0))
1815 /* Result will be either NULL_TREE, or a combined comparison. */
1816 tree t = combine_comparisons (UNKNOWN_LOCATION,
1817 TRUTH_ANDIF_EXPR, code1,
1818 swap_tree_comparison (code2),
1819 truth_type, op1a, op1b);
1820 if (t)
1821 return t;
1824 /* If both comparisons are of the same value against constants, we might
1825 be able to merge them. */
1826 if (operand_equal_p (op1a, op2a, 0)
1827 && TREE_CODE (op1b) == INTEGER_CST
1828 && TREE_CODE (op2b) == INTEGER_CST)
1830 int cmp = tree_int_cst_compare (op1b, op2b);
1832 /* If we have (op1a == op1b), we should either be able to
1833 return that or FALSE, depending on whether the constant op1b
1834 also satisfies the other comparison against op2b. */
1835 if (code1 == EQ_EXPR)
1837 bool done = true;
1838 bool val;
1839 switch (code2)
1841 case EQ_EXPR: val = (cmp == 0); break;
1842 case NE_EXPR: val = (cmp != 0); break;
1843 case LT_EXPR: val = (cmp < 0); break;
1844 case GT_EXPR: val = (cmp > 0); break;
1845 case LE_EXPR: val = (cmp <= 0); break;
1846 case GE_EXPR: val = (cmp >= 0); break;
1847 default: done = false;
1849 if (done)
1851 if (val)
1852 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1853 else
1854 return boolean_false_node;
1857 /* Likewise if the second comparison is an == comparison. */
1858 else if (code2 == EQ_EXPR)
1860 bool done = true;
1861 bool val;
1862 switch (code1)
1864 case EQ_EXPR: val = (cmp == 0); break;
1865 case NE_EXPR: val = (cmp != 0); break;
1866 case LT_EXPR: val = (cmp > 0); break;
1867 case GT_EXPR: val = (cmp < 0); break;
1868 case LE_EXPR: val = (cmp >= 0); break;
1869 case GE_EXPR: val = (cmp <= 0); break;
1870 default: done = false;
1872 if (done)
1874 if (val)
1875 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1876 else
1877 return boolean_false_node;
1881 /* Same business with inequality tests. */
1882 else if (code1 == NE_EXPR)
1884 bool val;
1885 switch (code2)
1887 case EQ_EXPR: val = (cmp != 0); break;
1888 case NE_EXPR: val = (cmp == 0); break;
1889 case LT_EXPR: val = (cmp >= 0); break;
1890 case GT_EXPR: val = (cmp <= 0); break;
1891 case LE_EXPR: val = (cmp > 0); break;
1892 case GE_EXPR: val = (cmp < 0); break;
1893 default:
1894 val = false;
1896 if (val)
1897 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1899 else if (code2 == NE_EXPR)
1901 bool val;
1902 switch (code1)
1904 case EQ_EXPR: val = (cmp == 0); break;
1905 case NE_EXPR: val = (cmp != 0); break;
1906 case LT_EXPR: val = (cmp <= 0); break;
1907 case GT_EXPR: val = (cmp >= 0); break;
1908 case LE_EXPR: val = (cmp < 0); break;
1909 case GE_EXPR: val = (cmp > 0); break;
1910 default:
1911 val = false;
1913 if (val)
1914 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1917 /* Chose the more restrictive of two < or <= comparisons. */
1918 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1919 && (code2 == LT_EXPR || code2 == LE_EXPR))
1921 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1922 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1923 else
1924 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1927 /* Likewise chose the more restrictive of two > or >= comparisons. */
1928 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1929 && (code2 == GT_EXPR || code2 == GE_EXPR))
1931 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1932 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1933 else
1934 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1937 /* Check for singleton ranges. */
1938 else if (cmp == 0
1939 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1940 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1941 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1943 /* Check for disjoint ranges. */
1944 else if (cmp <= 0
1945 && (code1 == LT_EXPR || code1 == LE_EXPR)
1946 && (code2 == GT_EXPR || code2 == GE_EXPR))
1947 return boolean_false_node;
1948 else if (cmp >= 0
1949 && (code1 == GT_EXPR || code1 == GE_EXPR)
1950 && (code2 == LT_EXPR || code2 == LE_EXPR))
1951 return boolean_false_node;
1954 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1955 NAME's definition is a truth value. See if there are any simplifications
1956 that can be done against the NAME's definition. */
1957 if (TREE_CODE (op1a) == SSA_NAME
1958 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1959 && (integer_zerop (op1b) || integer_onep (op1b)))
1961 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1962 || (code1 == NE_EXPR && integer_onep (op1b)));
1963 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1964 switch (gimple_code (stmt))
1966 case GIMPLE_ASSIGN:
1967 /* Try to simplify by copy-propagating the definition. */
1968 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1970 case GIMPLE_PHI:
1971 /* If every argument to the PHI produces the same result when
1972 ANDed with the second comparison, we win.
1973 Do not do this unless the type is bool since we need a bool
1974 result here anyway. */
1975 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1977 tree result = NULL_TREE;
1978 unsigned i;
1979 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1981 tree arg = gimple_phi_arg_def (stmt, i);
1983 /* If this PHI has itself as an argument, ignore it.
1984 If all the other args produce the same result,
1985 we're still OK. */
1986 if (arg == gimple_phi_result (stmt))
1987 continue;
1988 else if (TREE_CODE (arg) == INTEGER_CST)
1990 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1992 if (!result)
1993 result = boolean_false_node;
1994 else if (!integer_zerop (result))
1995 return NULL_TREE;
1997 else if (!result)
1998 result = fold_build2 (code2, boolean_type_node,
1999 op2a, op2b);
2000 else if (!same_bool_comparison_p (result,
2001 code2, op2a, op2b))
2002 return NULL_TREE;
2004 else if (TREE_CODE (arg) == SSA_NAME
2005 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2007 tree temp;
2008 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2009 /* In simple cases we can look through PHI nodes,
2010 but we have to be careful with loops.
2011 See PR49073. */
2012 if (! dom_info_available_p (CDI_DOMINATORS)
2013 || gimple_bb (def_stmt) == gimple_bb (stmt)
2014 || dominated_by_p (CDI_DOMINATORS,
2015 gimple_bb (def_stmt),
2016 gimple_bb (stmt)))
2017 return NULL_TREE;
2018 temp = and_var_with_comparison (arg, invert, code2,
2019 op2a, op2b);
2020 if (!temp)
2021 return NULL_TREE;
2022 else if (!result)
2023 result = temp;
2024 else if (!same_bool_result_p (result, temp))
2025 return NULL_TREE;
2027 else
2028 return NULL_TREE;
2030 return result;
2033 default:
2034 break;
2037 return NULL_TREE;
2040 /* Try to simplify the AND of two comparisons, specified by
2041 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2042 If this can be simplified to a single expression (without requiring
2043 introducing more SSA variables to hold intermediate values),
2044 return the resulting tree. Otherwise return NULL_TREE.
2045 If the result expression is non-null, it has boolean type. */
2047 tree
2048 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2049 enum tree_code code2, tree op2a, tree op2b)
2051 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2052 if (t)
2053 return t;
2054 else
2055 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2058 /* Helper function for or_comparisons_1: try to simplify the OR of the
2059 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2060 If INVERT is true, invert the value of VAR before doing the OR.
2061 Return NULL_EXPR if we can't simplify this to a single expression. */
2063 static tree
2064 or_var_with_comparison (tree var, bool invert,
2065 enum tree_code code2, tree op2a, tree op2b)
2067 tree t;
2068 gimple stmt = SSA_NAME_DEF_STMT (var);
2070 /* We can only deal with variables whose definitions are assignments. */
2071 if (!is_gimple_assign (stmt))
2072 return NULL_TREE;
2074 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2075 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2076 Then we only have to consider the simpler non-inverted cases. */
2077 if (invert)
2078 t = and_var_with_comparison_1 (stmt,
2079 invert_tree_comparison (code2, false),
2080 op2a, op2b);
2081 else
2082 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2083 return canonicalize_bool (t, invert);
2086 /* Try to simplify the OR of the ssa variable defined by the assignment
2087 STMT with the comparison specified by (OP2A CODE2 OP2B).
2088 Return NULL_EXPR if we can't simplify this to a single expression. */
2090 static tree
2091 or_var_with_comparison_1 (gimple stmt,
2092 enum tree_code code2, tree op2a, tree op2b)
2094 tree var = gimple_assign_lhs (stmt);
2095 tree true_test_var = NULL_TREE;
2096 tree false_test_var = NULL_TREE;
2097 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2099 /* Check for identities like (var OR (var != 0)) => true . */
2100 if (TREE_CODE (op2a) == SSA_NAME
2101 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2103 if ((code2 == NE_EXPR && integer_zerop (op2b))
2104 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2106 true_test_var = op2a;
2107 if (var == true_test_var)
2108 return var;
2110 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2111 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2113 false_test_var = op2a;
2114 if (var == false_test_var)
2115 return boolean_true_node;
2119 /* If the definition is a comparison, recurse on it. */
2120 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2122 tree t = or_comparisons_1 (innercode,
2123 gimple_assign_rhs1 (stmt),
2124 gimple_assign_rhs2 (stmt),
2125 code2,
2126 op2a,
2127 op2b);
2128 if (t)
2129 return t;
2132 /* If the definition is an AND or OR expression, we may be able to
2133 simplify by reassociating. */
2134 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2135 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2137 tree inner1 = gimple_assign_rhs1 (stmt);
2138 tree inner2 = gimple_assign_rhs2 (stmt);
2139 gimple s;
2140 tree t;
2141 tree partial = NULL_TREE;
2142 bool is_or = (innercode == BIT_IOR_EXPR);
2144 /* Check for boolean identities that don't require recursive examination
2145 of inner1/inner2:
2146 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2147 inner1 OR (inner1 AND inner2) => inner1
2148 !inner1 OR (inner1 OR inner2) => true
2149 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2151 if (inner1 == true_test_var)
2152 return (is_or ? var : inner1);
2153 else if (inner2 == true_test_var)
2154 return (is_or ? var : inner2);
2155 else if (inner1 == false_test_var)
2156 return (is_or
2157 ? boolean_true_node
2158 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2159 else if (inner2 == false_test_var)
2160 return (is_or
2161 ? boolean_true_node
2162 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2164 /* Next, redistribute/reassociate the OR across the inner tests.
2165 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2166 if (TREE_CODE (inner1) == SSA_NAME
2167 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2168 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2169 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2170 gimple_assign_rhs1 (s),
2171 gimple_assign_rhs2 (s),
2172 code2, op2a, op2b)))
2174 /* Handle the OR case, where we are reassociating:
2175 (inner1 OR inner2) OR (op2a code2 op2b)
2176 => (t OR inner2)
2177 If the partial result t is a constant, we win. Otherwise
2178 continue on to try reassociating with the other inner test. */
2179 if (is_or)
2181 if (integer_onep (t))
2182 return boolean_true_node;
2183 else if (integer_zerop (t))
2184 return inner2;
2187 /* Handle the AND case, where we are redistributing:
2188 (inner1 AND inner2) OR (op2a code2 op2b)
2189 => (t AND (inner2 OR (op2a code op2b))) */
2190 else if (integer_zerop (t))
2191 return boolean_false_node;
2193 /* Save partial result for later. */
2194 partial = t;
2197 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2198 if (TREE_CODE (inner2) == SSA_NAME
2199 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2200 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2201 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2202 gimple_assign_rhs1 (s),
2203 gimple_assign_rhs2 (s),
2204 code2, op2a, op2b)))
2206 /* Handle the OR case, where we are reassociating:
2207 (inner1 OR inner2) OR (op2a code2 op2b)
2208 => (inner1 OR t)
2209 => (t OR partial) */
2210 if (is_or)
2212 if (integer_zerop (t))
2213 return inner1;
2214 else if (integer_onep (t))
2215 return boolean_true_node;
2216 /* If both are the same, we can apply the identity
2217 (x OR x) == x. */
2218 else if (partial && same_bool_result_p (t, partial))
2219 return t;
2222 /* Handle the AND case, where we are redistributing:
2223 (inner1 AND inner2) OR (op2a code2 op2b)
2224 => (t AND (inner1 OR (op2a code2 op2b)))
2225 => (t AND partial) */
2226 else
2228 if (integer_zerop (t))
2229 return boolean_false_node;
2230 else if (partial)
2232 /* We already got a simplification for the other
2233 operand to the redistributed AND expression. The
2234 interesting case is when at least one is true.
2235 Or, if both are the same, we can apply the identity
2236 (x AND x) == x. */
2237 if (integer_onep (partial))
2238 return t;
2239 else if (integer_onep (t))
2240 return partial;
2241 else if (same_bool_result_p (t, partial))
2242 return t;
2247 return NULL_TREE;
2250 /* Try to simplify the OR of two comparisons defined by
2251 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2252 If this can be done without constructing an intermediate value,
2253 return the resulting tree; otherwise NULL_TREE is returned.
2254 This function is deliberately asymmetric as it recurses on SSA_DEFs
2255 in the first comparison but not the second. */
2257 static tree
2258 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2259 enum tree_code code2, tree op2a, tree op2b)
2261 tree truth_type = truth_type_for (TREE_TYPE (op1a));
2263 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2264 if (operand_equal_p (op1a, op2a, 0)
2265 && operand_equal_p (op1b, op2b, 0))
2267 /* Result will be either NULL_TREE, or a combined comparison. */
2268 tree t = combine_comparisons (UNKNOWN_LOCATION,
2269 TRUTH_ORIF_EXPR, code1, code2,
2270 truth_type, op1a, op1b);
2271 if (t)
2272 return t;
2275 /* Likewise the swapped case of the above. */
2276 if (operand_equal_p (op1a, op2b, 0)
2277 && operand_equal_p (op1b, op2a, 0))
2279 /* Result will be either NULL_TREE, or a combined comparison. */
2280 tree t = combine_comparisons (UNKNOWN_LOCATION,
2281 TRUTH_ORIF_EXPR, code1,
2282 swap_tree_comparison (code2),
2283 truth_type, op1a, op1b);
2284 if (t)
2285 return t;
2288 /* If both comparisons are of the same value against constants, we might
2289 be able to merge them. */
2290 if (operand_equal_p (op1a, op2a, 0)
2291 && TREE_CODE (op1b) == INTEGER_CST
2292 && TREE_CODE (op2b) == INTEGER_CST)
2294 int cmp = tree_int_cst_compare (op1b, op2b);
2296 /* If we have (op1a != op1b), we should either be able to
2297 return that or TRUE, depending on whether the constant op1b
2298 also satisfies the other comparison against op2b. */
2299 if (code1 == NE_EXPR)
2301 bool done = true;
2302 bool val;
2303 switch (code2)
2305 case EQ_EXPR: val = (cmp == 0); break;
2306 case NE_EXPR: val = (cmp != 0); break;
2307 case LT_EXPR: val = (cmp < 0); break;
2308 case GT_EXPR: val = (cmp > 0); break;
2309 case LE_EXPR: val = (cmp <= 0); break;
2310 case GE_EXPR: val = (cmp >= 0); break;
2311 default: done = false;
2313 if (done)
2315 if (val)
2316 return boolean_true_node;
2317 else
2318 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2321 /* Likewise if the second comparison is a != comparison. */
2322 else if (code2 == NE_EXPR)
2324 bool done = true;
2325 bool val;
2326 switch (code1)
2328 case EQ_EXPR: val = (cmp == 0); break;
2329 case NE_EXPR: val = (cmp != 0); break;
2330 case LT_EXPR: val = (cmp > 0); break;
2331 case GT_EXPR: val = (cmp < 0); break;
2332 case LE_EXPR: val = (cmp >= 0); break;
2333 case GE_EXPR: val = (cmp <= 0); break;
2334 default: done = false;
2336 if (done)
2338 if (val)
2339 return boolean_true_node;
2340 else
2341 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2345 /* See if an equality test is redundant with the other comparison. */
2346 else if (code1 == EQ_EXPR)
2348 bool val;
2349 switch (code2)
2351 case EQ_EXPR: val = (cmp == 0); break;
2352 case NE_EXPR: val = (cmp != 0); break;
2353 case LT_EXPR: val = (cmp < 0); break;
2354 case GT_EXPR: val = (cmp > 0); break;
2355 case LE_EXPR: val = (cmp <= 0); break;
2356 case GE_EXPR: val = (cmp >= 0); break;
2357 default:
2358 val = false;
2360 if (val)
2361 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2363 else if (code2 == EQ_EXPR)
2365 bool val;
2366 switch (code1)
2368 case EQ_EXPR: val = (cmp == 0); break;
2369 case NE_EXPR: val = (cmp != 0); break;
2370 case LT_EXPR: val = (cmp > 0); break;
2371 case GT_EXPR: val = (cmp < 0); break;
2372 case LE_EXPR: val = (cmp >= 0); break;
2373 case GE_EXPR: val = (cmp <= 0); break;
2374 default:
2375 val = false;
2377 if (val)
2378 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2381 /* Chose the less restrictive of two < or <= comparisons. */
2382 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2383 && (code2 == LT_EXPR || code2 == LE_EXPR))
2385 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2386 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2387 else
2388 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2391 /* Likewise chose the less restrictive of two > or >= comparisons. */
2392 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2393 && (code2 == GT_EXPR || code2 == GE_EXPR))
2395 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2396 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2397 else
2398 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2401 /* Check for singleton ranges. */
2402 else if (cmp == 0
2403 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2404 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2405 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2407 /* Check for less/greater pairs that don't restrict the range at all. */
2408 else if (cmp >= 0
2409 && (code1 == LT_EXPR || code1 == LE_EXPR)
2410 && (code2 == GT_EXPR || code2 == GE_EXPR))
2411 return boolean_true_node;
2412 else if (cmp <= 0
2413 && (code1 == GT_EXPR || code1 == GE_EXPR)
2414 && (code2 == LT_EXPR || code2 == LE_EXPR))
2415 return boolean_true_node;
2418 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2419 NAME's definition is a truth value. See if there are any simplifications
2420 that can be done against the NAME's definition. */
2421 if (TREE_CODE (op1a) == SSA_NAME
2422 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2423 && (integer_zerop (op1b) || integer_onep (op1b)))
2425 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2426 || (code1 == NE_EXPR && integer_onep (op1b)));
2427 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2428 switch (gimple_code (stmt))
2430 case GIMPLE_ASSIGN:
2431 /* Try to simplify by copy-propagating the definition. */
2432 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2434 case GIMPLE_PHI:
2435 /* If every argument to the PHI produces the same result when
2436 ORed with the second comparison, we win.
2437 Do not do this unless the type is bool since we need a bool
2438 result here anyway. */
2439 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2441 tree result = NULL_TREE;
2442 unsigned i;
2443 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2445 tree arg = gimple_phi_arg_def (stmt, i);
2447 /* If this PHI has itself as an argument, ignore it.
2448 If all the other args produce the same result,
2449 we're still OK. */
2450 if (arg == gimple_phi_result (stmt))
2451 continue;
2452 else if (TREE_CODE (arg) == INTEGER_CST)
2454 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2456 if (!result)
2457 result = boolean_true_node;
2458 else if (!integer_onep (result))
2459 return NULL_TREE;
2461 else if (!result)
2462 result = fold_build2 (code2, boolean_type_node,
2463 op2a, op2b);
2464 else if (!same_bool_comparison_p (result,
2465 code2, op2a, op2b))
2466 return NULL_TREE;
2468 else if (TREE_CODE (arg) == SSA_NAME
2469 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2471 tree temp;
2472 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2473 /* In simple cases we can look through PHI nodes,
2474 but we have to be careful with loops.
2475 See PR49073. */
2476 if (! dom_info_available_p (CDI_DOMINATORS)
2477 || gimple_bb (def_stmt) == gimple_bb (stmt)
2478 || dominated_by_p (CDI_DOMINATORS,
2479 gimple_bb (def_stmt),
2480 gimple_bb (stmt)))
2481 return NULL_TREE;
2482 temp = or_var_with_comparison (arg, invert, code2,
2483 op2a, op2b);
2484 if (!temp)
2485 return NULL_TREE;
2486 else if (!result)
2487 result = temp;
2488 else if (!same_bool_result_p (result, temp))
2489 return NULL_TREE;
2491 else
2492 return NULL_TREE;
2494 return result;
2497 default:
2498 break;
2501 return NULL_TREE;
2504 /* Try to simplify the OR of two comparisons, specified by
2505 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2506 If this can be simplified to a single expression (without requiring
2507 introducing more SSA variables to hold intermediate values),
2508 return the resulting tree. Otherwise return NULL_TREE.
2509 If the result expression is non-null, it has boolean type. */
2511 tree
2512 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2513 enum tree_code code2, tree op2a, tree op2b)
2515 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2516 if (t)
2517 return t;
2518 else
2519 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2523 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2525 Either NULL_TREE, a simplified but non-constant or a constant
2526 is returned.
2528 ??? This should go into a gimple-fold-inline.h file to be eventually
2529 privatized with the single valueize function used in the various TUs
2530 to avoid the indirect function call overhead. */
2532 tree
2533 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2535 location_t loc = gimple_location (stmt);
2536 switch (gimple_code (stmt))
2538 case GIMPLE_ASSIGN:
2540 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2542 switch (get_gimple_rhs_class (subcode))
2544 case GIMPLE_SINGLE_RHS:
2546 tree rhs = gimple_assign_rhs1 (stmt);
2547 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2549 if (TREE_CODE (rhs) == SSA_NAME)
2551 /* If the RHS is an SSA_NAME, return its known constant value,
2552 if any. */
2553 return (*valueize) (rhs);
2555 /* Handle propagating invariant addresses into address
2556 operations. */
2557 else if (TREE_CODE (rhs) == ADDR_EXPR
2558 && !is_gimple_min_invariant (rhs))
2560 HOST_WIDE_INT offset = 0;
2561 tree base;
2562 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2563 &offset,
2564 valueize);
2565 if (base
2566 && (CONSTANT_CLASS_P (base)
2567 || decl_address_invariant_p (base)))
2568 return build_invariant_address (TREE_TYPE (rhs),
2569 base, offset);
2571 else if (TREE_CODE (rhs) == CONSTRUCTOR
2572 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2573 && (CONSTRUCTOR_NELTS (rhs)
2574 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2576 unsigned i;
2577 tree val, *vec;
2579 vec = XALLOCAVEC (tree,
2580 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2581 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2583 val = (*valueize) (val);
2584 if (TREE_CODE (val) == INTEGER_CST
2585 || TREE_CODE (val) == REAL_CST
2586 || TREE_CODE (val) == FIXED_CST)
2587 vec[i] = val;
2588 else
2589 return NULL_TREE;
2592 return build_vector (TREE_TYPE (rhs), vec);
2594 if (subcode == OBJ_TYPE_REF)
2596 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
2597 /* If callee is constant, we can fold away the wrapper. */
2598 if (is_gimple_min_invariant (val))
2599 return val;
2602 if (kind == tcc_reference)
2604 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2605 || TREE_CODE (rhs) == REALPART_EXPR
2606 || TREE_CODE (rhs) == IMAGPART_EXPR)
2607 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2609 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2610 return fold_unary_loc (EXPR_LOCATION (rhs),
2611 TREE_CODE (rhs),
2612 TREE_TYPE (rhs), val);
2614 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2615 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2617 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2618 return fold_ternary_loc (EXPR_LOCATION (rhs),
2619 TREE_CODE (rhs),
2620 TREE_TYPE (rhs), val,
2621 TREE_OPERAND (rhs, 1),
2622 TREE_OPERAND (rhs, 2));
2624 else if (TREE_CODE (rhs) == MEM_REF
2625 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2627 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2628 if (TREE_CODE (val) == ADDR_EXPR
2629 && is_gimple_min_invariant (val))
2631 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2632 unshare_expr (val),
2633 TREE_OPERAND (rhs, 1));
2634 if (tem)
2635 rhs = tem;
2638 return fold_const_aggregate_ref_1 (rhs, valueize);
2640 else if (kind == tcc_declaration)
2641 return get_symbol_constant_value (rhs);
2642 return rhs;
2645 case GIMPLE_UNARY_RHS:
2647 /* Handle unary operators that can appear in GIMPLE form.
2648 Note that we know the single operand must be a constant,
2649 so this should almost always return a simplified RHS. */
2650 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2652 return
2653 fold_unary_ignore_overflow_loc (loc, subcode,
2654 gimple_expr_type (stmt), op0);
2657 case GIMPLE_BINARY_RHS:
2659 /* Handle binary operators that can appear in GIMPLE form. */
2660 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2661 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2663 /* Translate &x + CST into an invariant form suitable for
2664 further propagation. */
2665 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2666 && TREE_CODE (op0) == ADDR_EXPR
2667 && TREE_CODE (op1) == INTEGER_CST)
2669 tree off = fold_convert (ptr_type_node, op1);
2670 return build_fold_addr_expr_loc
2671 (loc,
2672 fold_build2 (MEM_REF,
2673 TREE_TYPE (TREE_TYPE (op0)),
2674 unshare_expr (op0), off));
2677 return fold_binary_loc (loc, subcode,
2678 gimple_expr_type (stmt), op0, op1);
2681 case GIMPLE_TERNARY_RHS:
2683 /* Handle ternary operators that can appear in GIMPLE form. */
2684 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2685 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2686 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2688 /* Fold embedded expressions in ternary codes. */
2689 if ((subcode == COND_EXPR
2690 || subcode == VEC_COND_EXPR)
2691 && COMPARISON_CLASS_P (op0))
2693 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2694 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2695 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2696 TREE_TYPE (op0), op00, op01);
2697 if (tem)
2698 op0 = tem;
2701 return fold_ternary_loc (loc, subcode,
2702 gimple_expr_type (stmt), op0, op1, op2);
2705 default:
2706 gcc_unreachable ();
2710 case GIMPLE_CALL:
2712 tree fn;
2714 if (gimple_call_internal_p (stmt))
2716 enum tree_code subcode = ERROR_MARK;
2717 switch (gimple_call_internal_fn (stmt))
2719 case IFN_UBSAN_CHECK_ADD:
2720 subcode = PLUS_EXPR;
2721 break;
2722 case IFN_UBSAN_CHECK_SUB:
2723 subcode = MINUS_EXPR;
2724 break;
2725 case IFN_UBSAN_CHECK_MUL:
2726 subcode = MULT_EXPR;
2727 break;
2728 default:
2729 return NULL_TREE;
2731 tree arg0 = gimple_call_arg (stmt, 0);
2732 tree arg1 = gimple_call_arg (stmt, 1);
2733 tree op0 = (*valueize) (arg0);
2734 tree op1 = (*valueize) (arg1);
2736 if (TREE_CODE (op0) != INTEGER_CST
2737 || TREE_CODE (op1) != INTEGER_CST)
2739 switch (subcode)
2741 case MULT_EXPR:
2742 /* x * 0 = 0 * x = 0 without overflow. */
2743 if (integer_zerop (op0) || integer_zerop (op1))
2744 return build_zero_cst (TREE_TYPE (arg0));
2745 break;
2746 case MINUS_EXPR:
2747 /* y - y = 0 without overflow. */
2748 if (operand_equal_p (op0, op1, 0))
2749 return build_zero_cst (TREE_TYPE (arg0));
2750 break;
2751 default:
2752 break;
2755 tree res
2756 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
2757 if (res
2758 && TREE_CODE (res) == INTEGER_CST
2759 && !TREE_OVERFLOW (res))
2760 return res;
2761 return NULL_TREE;
2764 fn = (*valueize) (gimple_call_fn (stmt));
2765 if (TREE_CODE (fn) == ADDR_EXPR
2766 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2767 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
2768 && gimple_builtin_call_types_compatible_p (stmt,
2769 TREE_OPERAND (fn, 0)))
2771 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2772 tree call, retval;
2773 unsigned i;
2774 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2775 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2776 call = build_call_array_loc (loc,
2777 gimple_call_return_type (stmt),
2778 fn, gimple_call_num_args (stmt), args);
2779 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2780 if (retval)
2782 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2783 STRIP_NOPS (retval);
2784 retval = fold_convert (gimple_call_return_type (stmt), retval);
2786 return retval;
2788 return NULL_TREE;
2791 default:
2792 return NULL_TREE;
2796 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2797 Returns NULL_TREE if folding to a constant is not possible, otherwise
2798 returns a constant according to is_gimple_min_invariant. */
2800 tree
2801 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2803 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2804 if (res && is_gimple_min_invariant (res))
2805 return res;
2806 return NULL_TREE;
2810 /* The following set of functions are supposed to fold references using
2811 their constant initializers. */
2813 static tree fold_ctor_reference (tree type, tree ctor,
2814 unsigned HOST_WIDE_INT offset,
2815 unsigned HOST_WIDE_INT size, tree);
2817 /* See if we can find constructor defining value of BASE.
2818 When we know the consructor with constant offset (such as
2819 base is array[40] and we do know constructor of array), then
2820 BIT_OFFSET is adjusted accordingly.
2822 As a special case, return error_mark_node when constructor
2823 is not explicitly available, but it is known to be zero
2824 such as 'static const int a;'. */
2825 static tree
2826 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2827 tree (*valueize)(tree))
2829 HOST_WIDE_INT bit_offset2, size, max_size;
2830 if (TREE_CODE (base) == MEM_REF)
2832 if (!integer_zerop (TREE_OPERAND (base, 1)))
2834 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
2835 return NULL_TREE;
2836 *bit_offset += (mem_ref_offset (base).to_short_addr ()
2837 * BITS_PER_UNIT);
2840 if (valueize
2841 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2842 base = valueize (TREE_OPERAND (base, 0));
2843 if (!base || TREE_CODE (base) != ADDR_EXPR)
2844 return NULL_TREE;
2845 base = TREE_OPERAND (base, 0);
2848 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2849 DECL_INITIAL. If BASE is a nested reference into another
2850 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2851 the inner reference. */
2852 switch (TREE_CODE (base))
2854 case VAR_DECL:
2855 case CONST_DECL:
2857 tree init = ctor_for_folding (base);
2859 /* Our semantic is exact opposite of ctor_for_folding;
2860 NULL means unknown, while error_mark_node is 0. */
2861 if (init == error_mark_node)
2862 return NULL_TREE;
2863 if (!init)
2864 return error_mark_node;
2865 return init;
2868 case ARRAY_REF:
2869 case COMPONENT_REF:
2870 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2871 if (max_size == -1 || size != max_size)
2872 return NULL_TREE;
2873 *bit_offset += bit_offset2;
2874 return get_base_constructor (base, bit_offset, valueize);
2876 case STRING_CST:
2877 case CONSTRUCTOR:
2878 return base;
2880 default:
2881 return NULL_TREE;
2885 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2886 SIZE to the memory at bit OFFSET. */
2888 static tree
2889 fold_array_ctor_reference (tree type, tree ctor,
2890 unsigned HOST_WIDE_INT offset,
2891 unsigned HOST_WIDE_INT size,
2892 tree from_decl)
2894 unsigned HOST_WIDE_INT cnt;
2895 tree cfield, cval;
2896 offset_int low_bound;
2897 offset_int elt_size;
2898 offset_int index, max_index;
2899 offset_int access_index;
2900 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2901 HOST_WIDE_INT inner_offset;
2903 /* Compute low bound and elt size. */
2904 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2905 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2906 if (domain_type && TYPE_MIN_VALUE (domain_type))
2908 /* Static constructors for variably sized objects makes no sense. */
2909 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2910 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2911 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
2913 else
2914 low_bound = 0;
2915 /* Static constructors for variably sized objects makes no sense. */
2916 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2917 == INTEGER_CST);
2918 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2920 /* We can handle only constantly sized accesses that are known to not
2921 be larger than size of array element. */
2922 if (!TYPE_SIZE_UNIT (type)
2923 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2924 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
2925 || elt_size == 0)
2926 return NULL_TREE;
2928 /* Compute the array index we look for. */
2929 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
2930 elt_size);
2931 access_index += low_bound;
2932 if (index_type)
2933 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
2934 TYPE_SIGN (index_type));
2936 /* And offset within the access. */
2937 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2939 /* See if the array field is large enough to span whole access. We do not
2940 care to fold accesses spanning multiple array indexes. */
2941 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
2942 return NULL_TREE;
2944 index = low_bound - 1;
2945 if (index_type)
2946 index = wi::ext (index, TYPE_PRECISION (index_type),
2947 TYPE_SIGN (index_type));
2949 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2951 /* Array constructor might explicitely set index, or specify range
2952 or leave index NULL meaning that it is next index after previous
2953 one. */
2954 if (cfield)
2956 if (TREE_CODE (cfield) == INTEGER_CST)
2957 max_index = index = wi::to_offset (cfield);
2958 else
2960 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2961 index = wi::to_offset (TREE_OPERAND (cfield, 0));
2962 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
2965 else
2967 index += 1;
2968 if (index_type)
2969 index = wi::ext (index, TYPE_PRECISION (index_type),
2970 TYPE_SIGN (index_type));
2971 max_index = index;
2974 /* Do we have match? */
2975 if (wi::cmpu (access_index, index) >= 0
2976 && wi::cmpu (access_index, max_index) <= 0)
2977 return fold_ctor_reference (type, cval, inner_offset, size,
2978 from_decl);
2980 /* When memory is not explicitely mentioned in constructor,
2981 it is 0 (or out of range). */
2982 return build_zero_cst (type);
2985 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2986 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2988 static tree
2989 fold_nonarray_ctor_reference (tree type, tree ctor,
2990 unsigned HOST_WIDE_INT offset,
2991 unsigned HOST_WIDE_INT size,
2992 tree from_decl)
2994 unsigned HOST_WIDE_INT cnt;
2995 tree cfield, cval;
2997 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2998 cval)
3000 tree byte_offset = DECL_FIELD_OFFSET (cfield);
3001 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
3002 tree field_size = DECL_SIZE (cfield);
3003 offset_int bitoffset;
3004 offset_int bitoffset_end, access_end;
3006 /* Variable sized objects in static constructors makes no sense,
3007 but field_size can be NULL for flexible array members. */
3008 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
3009 && TREE_CODE (byte_offset) == INTEGER_CST
3010 && (field_size != NULL_TREE
3011 ? TREE_CODE (field_size) == INTEGER_CST
3012 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
3014 /* Compute bit offset of the field. */
3015 bitoffset = (wi::to_offset (field_offset)
3016 + wi::lshift (wi::to_offset (byte_offset),
3017 LOG2_BITS_PER_UNIT));
3018 /* Compute bit offset where the field ends. */
3019 if (field_size != NULL_TREE)
3020 bitoffset_end = bitoffset + wi::to_offset (field_size);
3021 else
3022 bitoffset_end = 0;
3024 access_end = offset_int (offset) + size;
3026 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
3027 [BITOFFSET, BITOFFSET_END)? */
3028 if (wi::cmps (access_end, bitoffset) > 0
3029 && (field_size == NULL_TREE
3030 || wi::lts_p (offset, bitoffset_end)))
3032 offset_int inner_offset = offset_int (offset) - bitoffset;
3033 /* We do have overlap. Now see if field is large enough to
3034 cover the access. Give up for accesses spanning multiple
3035 fields. */
3036 if (wi::cmps (access_end, bitoffset_end) > 0)
3037 return NULL_TREE;
3038 if (wi::lts_p (offset, bitoffset))
3039 return NULL_TREE;
3040 return fold_ctor_reference (type, cval,
3041 inner_offset.to_uhwi (), size,
3042 from_decl);
3045 /* When memory is not explicitely mentioned in constructor, it is 0. */
3046 return build_zero_cst (type);
3049 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
3050 to the memory at bit OFFSET. */
3052 static tree
3053 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3054 unsigned HOST_WIDE_INT size, tree from_decl)
3056 tree ret;
3058 /* We found the field with exact match. */
3059 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3060 && !offset)
3061 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
3063 /* We are at the end of walk, see if we can view convert the
3064 result. */
3065 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3066 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3067 && operand_equal_p (TYPE_SIZE (type),
3068 TYPE_SIZE (TREE_TYPE (ctor)), 0))
3070 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
3071 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3072 if (ret)
3073 STRIP_NOPS (ret);
3074 return ret;
3076 /* For constants and byte-aligned/sized reads try to go through
3077 native_encode/interpret. */
3078 if (CONSTANT_CLASS_P (ctor)
3079 && BITS_PER_UNIT == 8
3080 && offset % BITS_PER_UNIT == 0
3081 && size % BITS_PER_UNIT == 0
3082 && size <= MAX_BITSIZE_MODE_ANY_MODE)
3084 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
3085 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
3086 offset / BITS_PER_UNIT) > 0)
3087 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
3089 if (TREE_CODE (ctor) == CONSTRUCTOR)
3092 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
3093 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
3094 return fold_array_ctor_reference (type, ctor, offset, size,
3095 from_decl);
3096 else
3097 return fold_nonarray_ctor_reference (type, ctor, offset, size,
3098 from_decl);
3101 return NULL_TREE;
3104 /* Return the tree representing the element referenced by T if T is an
3105 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
3106 names using VALUEIZE. Return NULL_TREE otherwise. */
3108 tree
3109 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3111 tree ctor, idx, base;
3112 HOST_WIDE_INT offset, size, max_size;
3113 tree tem;
3115 if (TREE_THIS_VOLATILE (t))
3116 return NULL_TREE;
3118 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3119 return get_symbol_constant_value (t);
3121 tem = fold_read_from_constant_string (t);
3122 if (tem)
3123 return tem;
3125 switch (TREE_CODE (t))
3127 case ARRAY_REF:
3128 case ARRAY_RANGE_REF:
3129 /* Constant indexes are handled well by get_base_constructor.
3130 Only special case variable offsets.
3131 FIXME: This code can't handle nested references with variable indexes
3132 (they will be handled only by iteration of ccp). Perhaps we can bring
3133 get_ref_base_and_extent here and make it use a valueize callback. */
3134 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3135 && valueize
3136 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3137 && TREE_CODE (idx) == INTEGER_CST)
3139 tree low_bound, unit_size;
3141 /* If the resulting bit-offset is constant, track it. */
3142 if ((low_bound = array_ref_low_bound (t),
3143 TREE_CODE (low_bound) == INTEGER_CST)
3144 && (unit_size = array_ref_element_size (t),
3145 tree_fits_uhwi_p (unit_size)))
3147 offset_int woffset
3148 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
3149 TYPE_PRECISION (TREE_TYPE (idx)));
3151 if (wi::fits_shwi_p (woffset))
3153 offset = woffset.to_shwi ();
3154 /* TODO: This code seems wrong, multiply then check
3155 to see if it fits. */
3156 offset *= tree_to_uhwi (unit_size);
3157 offset *= BITS_PER_UNIT;
3159 base = TREE_OPERAND (t, 0);
3160 ctor = get_base_constructor (base, &offset, valueize);
3161 /* Empty constructor. Always fold to 0. */
3162 if (ctor == error_mark_node)
3163 return build_zero_cst (TREE_TYPE (t));
3164 /* Out of bound array access. Value is undefined,
3165 but don't fold. */
3166 if (offset < 0)
3167 return NULL_TREE;
3168 /* We can not determine ctor. */
3169 if (!ctor)
3170 return NULL_TREE;
3171 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3172 tree_to_uhwi (unit_size)
3173 * BITS_PER_UNIT,
3174 base);
3178 /* Fallthru. */
3180 case COMPONENT_REF:
3181 case BIT_FIELD_REF:
3182 case TARGET_MEM_REF:
3183 case MEM_REF:
3184 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3185 ctor = get_base_constructor (base, &offset, valueize);
3187 /* Empty constructor. Always fold to 0. */
3188 if (ctor == error_mark_node)
3189 return build_zero_cst (TREE_TYPE (t));
3190 /* We do not know precise address. */
3191 if (max_size == -1 || max_size != size)
3192 return NULL_TREE;
3193 /* We can not determine ctor. */
3194 if (!ctor)
3195 return NULL_TREE;
3197 /* Out of bound array access. Value is undefined, but don't fold. */
3198 if (offset < 0)
3199 return NULL_TREE;
3201 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
3202 base);
3204 case REALPART_EXPR:
3205 case IMAGPART_EXPR:
3207 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3208 if (c && TREE_CODE (c) == COMPLEX_CST)
3209 return fold_build1_loc (EXPR_LOCATION (t),
3210 TREE_CODE (t), TREE_TYPE (t), c);
3211 break;
3214 default:
3215 break;
3218 return NULL_TREE;
3221 tree
3222 fold_const_aggregate_ref (tree t)
3224 return fold_const_aggregate_ref_1 (t, NULL);
3227 /* Lookup virtual method with index TOKEN in a virtual table V
3228 at OFFSET.
3229 Set CAN_REFER if non-NULL to false if method
3230 is not referable or if the virtual table is ill-formed (such as rewriten
3231 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
3233 tree
3234 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
3235 tree v,
3236 unsigned HOST_WIDE_INT offset,
3237 bool *can_refer)
3239 tree vtable = v, init, fn;
3240 unsigned HOST_WIDE_INT size;
3241 unsigned HOST_WIDE_INT elt_size, access_index;
3242 tree domain_type;
3244 if (can_refer)
3245 *can_refer = true;
3247 /* First of all double check we have virtual table. */
3248 if (TREE_CODE (v) != VAR_DECL
3249 || !DECL_VIRTUAL_P (v))
3251 gcc_assert (in_lto_p);
3252 /* Pass down that we lost track of the target. */
3253 if (can_refer)
3254 *can_refer = false;
3255 return NULL_TREE;
3258 init = ctor_for_folding (v);
3260 /* The virtual tables should always be born with constructors
3261 and we always should assume that they are avaialble for
3262 folding. At the moment we do not stream them in all cases,
3263 but it should never happen that ctor seem unreachable. */
3264 gcc_assert (init);
3265 if (init == error_mark_node)
3267 gcc_assert (in_lto_p);
3268 /* Pass down that we lost track of the target. */
3269 if (can_refer)
3270 *can_refer = false;
3271 return NULL_TREE;
3273 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3274 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
3275 offset *= BITS_PER_UNIT;
3276 offset += token * size;
3278 /* Lookup the value in the constructor that is assumed to be array.
3279 This is equivalent to
3280 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
3281 offset, size, NULL);
3282 but in a constant time. We expect that frontend produced a simple
3283 array without indexed initializers. */
3285 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
3286 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
3287 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
3288 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
3290 access_index = offset / BITS_PER_UNIT / elt_size;
3291 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
3293 /* This code makes an assumption that there are no
3294 indexed fileds produced by C++ FE, so we can directly index the array. */
3295 if (access_index < CONSTRUCTOR_NELTS (init))
3297 fn = CONSTRUCTOR_ELT (init, access_index)->value;
3298 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
3299 STRIP_NOPS (fn);
3301 else
3302 fn = NULL;
3304 /* For type inconsistent program we may end up looking up virtual method
3305 in virtual table that does not contain TOKEN entries. We may overrun
3306 the virtual table and pick up a constant or RTTI info pointer.
3307 In any case the call is undefined. */
3308 if (!fn
3309 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
3310 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
3311 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3312 else
3314 fn = TREE_OPERAND (fn, 0);
3316 /* When cgraph node is missing and function is not public, we cannot
3317 devirtualize. This can happen in WHOPR when the actual method
3318 ends up in other partition, because we found devirtualization
3319 possibility too late. */
3320 if (!can_refer_decl_in_current_unit_p (fn, vtable))
3322 if (can_refer)
3324 *can_refer = false;
3325 return fn;
3327 return NULL_TREE;
3331 /* Make sure we create a cgraph node for functions we'll reference.
3332 They can be non-existent if the reference comes from an entry
3333 of an external vtable for example. */
3334 cgraph_node::get_create (fn);
3336 return fn;
3339 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3340 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3341 KNOWN_BINFO carries the binfo describing the true type of
3342 OBJ_TYPE_REF_OBJECT(REF).
3343 Set CAN_REFER if non-NULL to false if method
3344 is not referable or if the virtual table is ill-formed (such as rewriten
3345 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
3347 tree
3348 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
3349 bool *can_refer)
3351 unsigned HOST_WIDE_INT offset;
3352 tree v;
3354 v = BINFO_VTABLE (known_binfo);
3355 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3356 if (!v)
3357 return NULL_TREE;
3359 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
3361 if (can_refer)
3362 *can_refer = false;
3363 return NULL_TREE;
3365 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
3368 /* Return true iff VAL is a gimple expression that is known to be
3369 non-negative. Restricted to floating-point inputs. */
3371 bool
3372 gimple_val_nonnegative_real_p (tree val)
3374 gimple def_stmt;
3376 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3378 /* Use existing logic for non-gimple trees. */
3379 if (tree_expr_nonnegative_p (val))
3380 return true;
3382 if (TREE_CODE (val) != SSA_NAME)
3383 return false;
3385 /* Currently we look only at the immediately defining statement
3386 to make this determination, since recursion on defining
3387 statements of operands can lead to quadratic behavior in the
3388 worst case. This is expected to catch almost all occurrences
3389 in practice. It would be possible to implement limited-depth
3390 recursion if important cases are lost. Alternatively, passes
3391 that need this information (such as the pow/powi lowering code
3392 in the cse_sincos pass) could be revised to provide it through
3393 dataflow propagation. */
3395 def_stmt = SSA_NAME_DEF_STMT (val);
3397 if (is_gimple_assign (def_stmt))
3399 tree op0, op1;
3401 /* See fold-const.c:tree_expr_nonnegative_p for additional
3402 cases that could be handled with recursion. */
3404 switch (gimple_assign_rhs_code (def_stmt))
3406 case ABS_EXPR:
3407 /* Always true for floating-point operands. */
3408 return true;
3410 case MULT_EXPR:
3411 /* True if the two operands are identical (since we are
3412 restricted to floating-point inputs). */
3413 op0 = gimple_assign_rhs1 (def_stmt);
3414 op1 = gimple_assign_rhs2 (def_stmt);
3416 if (op0 == op1
3417 || operand_equal_p (op0, op1, 0))
3418 return true;
3420 default:
3421 return false;
3424 else if (is_gimple_call (def_stmt))
3426 tree fndecl = gimple_call_fndecl (def_stmt);
3427 if (fndecl
3428 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3430 tree arg1;
3432 switch (DECL_FUNCTION_CODE (fndecl))
3434 CASE_FLT_FN (BUILT_IN_ACOS):
3435 CASE_FLT_FN (BUILT_IN_ACOSH):
3436 CASE_FLT_FN (BUILT_IN_CABS):
3437 CASE_FLT_FN (BUILT_IN_COSH):
3438 CASE_FLT_FN (BUILT_IN_ERFC):
3439 CASE_FLT_FN (BUILT_IN_EXP):
3440 CASE_FLT_FN (BUILT_IN_EXP10):
3441 CASE_FLT_FN (BUILT_IN_EXP2):
3442 CASE_FLT_FN (BUILT_IN_FABS):
3443 CASE_FLT_FN (BUILT_IN_FDIM):
3444 CASE_FLT_FN (BUILT_IN_HYPOT):
3445 CASE_FLT_FN (BUILT_IN_POW10):
3446 return true;
3448 CASE_FLT_FN (BUILT_IN_SQRT):
3449 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3450 nonnegative inputs. */
3451 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3452 return true;
3454 break;
3456 CASE_FLT_FN (BUILT_IN_POWI):
3457 /* True if the second argument is an even integer. */
3458 arg1 = gimple_call_arg (def_stmt, 1);
3460 if (TREE_CODE (arg1) == INTEGER_CST
3461 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3462 return true;
3464 break;
3466 CASE_FLT_FN (BUILT_IN_POW):
3467 /* True if the second argument is an even integer-valued
3468 real. */
3469 arg1 = gimple_call_arg (def_stmt, 1);
3471 if (TREE_CODE (arg1) == REAL_CST)
3473 REAL_VALUE_TYPE c;
3474 HOST_WIDE_INT n;
3476 c = TREE_REAL_CST (arg1);
3477 n = real_to_integer (&c);
3479 if ((n & 1) == 0)
3481 REAL_VALUE_TYPE cint;
3482 real_from_integer (&cint, VOIDmode, n, SIGNED);
3483 if (real_identical (&c, &cint))
3484 return true;
3488 break;
3490 default:
3491 return false;
3496 return false;
3499 /* Given a pointer value OP0, return a simplified version of an
3500 indirection through OP0, or NULL_TREE if no simplification is
3501 possible. Note that the resulting type may be different from
3502 the type pointed to in the sense that it is still compatible
3503 from the langhooks point of view. */
3505 tree
3506 gimple_fold_indirect_ref (tree t)
3508 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
3509 tree sub = t;
3510 tree subtype;
3512 STRIP_NOPS (sub);
3513 subtype = TREE_TYPE (sub);
3514 if (!POINTER_TYPE_P (subtype))
3515 return NULL_TREE;
3517 if (TREE_CODE (sub) == ADDR_EXPR)
3519 tree op = TREE_OPERAND (sub, 0);
3520 tree optype = TREE_TYPE (op);
3521 /* *&p => p */
3522 if (useless_type_conversion_p (type, optype))
3523 return op;
3525 /* *(foo *)&fooarray => fooarray[0] */
3526 if (TREE_CODE (optype) == ARRAY_TYPE
3527 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
3528 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3530 tree type_domain = TYPE_DOMAIN (optype);
3531 tree min_val = size_zero_node;
3532 if (type_domain && TYPE_MIN_VALUE (type_domain))
3533 min_val = TYPE_MIN_VALUE (type_domain);
3534 if (TREE_CODE (min_val) == INTEGER_CST)
3535 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3537 /* *(foo *)&complexfoo => __real__ complexfoo */
3538 else if (TREE_CODE (optype) == COMPLEX_TYPE
3539 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3540 return fold_build1 (REALPART_EXPR, type, op);
3541 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
3542 else if (TREE_CODE (optype) == VECTOR_TYPE
3543 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3545 tree part_width = TYPE_SIZE (type);
3546 tree index = bitsize_int (0);
3547 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
3551 /* *(p + CST) -> ... */
3552 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
3553 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
3555 tree addr = TREE_OPERAND (sub, 0);
3556 tree off = TREE_OPERAND (sub, 1);
3557 tree addrtype;
3559 STRIP_NOPS (addr);
3560 addrtype = TREE_TYPE (addr);
3562 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
3563 if (TREE_CODE (addr) == ADDR_EXPR
3564 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
3565 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
3566 && tree_fits_uhwi_p (off))
3568 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
3569 tree part_width = TYPE_SIZE (type);
3570 unsigned HOST_WIDE_INT part_widthi
3571 = tree_to_shwi (part_width) / BITS_PER_UNIT;
3572 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
3573 tree index = bitsize_int (indexi);
3574 if (offset / part_widthi
3575 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
3576 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
3577 part_width, index);
3580 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
3581 if (TREE_CODE (addr) == ADDR_EXPR
3582 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
3583 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
3585 tree size = TYPE_SIZE_UNIT (type);
3586 if (tree_int_cst_equal (size, off))
3587 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
3590 /* *(p + CST) -> MEM_REF <p, CST>. */
3591 if (TREE_CODE (addr) != ADDR_EXPR
3592 || DECL_P (TREE_OPERAND (addr, 0)))
3593 return fold_build2 (MEM_REF, type,
3594 addr,
3595 wide_int_to_tree (ptype, off));
3598 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3599 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3600 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
3601 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3603 tree type_domain;
3604 tree min_val = size_zero_node;
3605 tree osub = sub;
3606 sub = gimple_fold_indirect_ref (sub);
3607 if (! sub)
3608 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3609 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3610 if (type_domain && TYPE_MIN_VALUE (type_domain))
3611 min_val = TYPE_MIN_VALUE (type_domain);
3612 if (TREE_CODE (min_val) == INTEGER_CST)
3613 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3616 return NULL_TREE;
3619 /* Return true if CODE is an operation that when operating on signed
3620 integer types involves undefined behavior on overflow and the
3621 operation can be expressed with unsigned arithmetic. */
3623 bool
3624 arith_code_with_undefined_signed_overflow (tree_code code)
3626 switch (code)
3628 case PLUS_EXPR:
3629 case MINUS_EXPR:
3630 case MULT_EXPR:
3631 case NEGATE_EXPR:
3632 case POINTER_PLUS_EXPR:
3633 return true;
3634 default:
3635 return false;
3639 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
3640 operation that can be transformed to unsigned arithmetic by converting
3641 its operand, carrying out the operation in the corresponding unsigned
3642 type and converting the result back to the original type.
3644 Returns a sequence of statements that replace STMT and also contain
3645 a modified form of STMT itself. */
3647 gimple_seq
3648 rewrite_to_defined_overflow (gimple stmt)
3650 if (dump_file && (dump_flags & TDF_DETAILS))
3652 fprintf (dump_file, "rewriting stmt with undefined signed "
3653 "overflow ");
3654 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3657 tree lhs = gimple_assign_lhs (stmt);
3658 tree type = unsigned_type_for (TREE_TYPE (lhs));
3659 gimple_seq stmts = NULL;
3660 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
3662 gimple_seq stmts2 = NULL;
3663 gimple_set_op (stmt, i,
3664 force_gimple_operand (fold_convert (type,
3665 gimple_op (stmt, i)),
3666 &stmts2, true, NULL_TREE));
3667 gimple_seq_add_seq (&stmts, stmts2);
3669 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
3670 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
3671 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
3672 gimple_seq_add_stmt (&stmts, stmt);
3673 gimple cvt = gimple_build_assign_with_ops
3674 (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
3675 gimple_seq_add_stmt (&stmts, cvt);
3677 return stmts;