/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2014 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stringpool.h"
#include "expr.h"
#include "stmt.h"
#include "stor-layout.h"
#include "flags.h"
#include "function.h"
#include "dumpfile.h"
#include "bitmap.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
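/* As a concrete instance of case 3: an external vtable entry may name a
   COMDAT virtual method whose body this unit has already decided not to
   emit, so folding must refuse to introduce a direct reference to it.  */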
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (cgraph_function_flags_ready)
	return true;
      snode = symtab_get_node (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_get_node (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_get_node (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     The exception is COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible: the ABI specifies that COMDATs are output
     in units where they are used, and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!cgraph_function_flags_ready)
    return true;

  snode = symtab_get_node (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
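  /* At this point an initializer of the form "&base + 4" has been rewritten
     to the equivalent "&MEM_REF [&base, 4]" (shown schematically), a form
     that is_gimple_min_invariant accepts.  */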
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_get_create_node (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}

/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);
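  /* *T now points to the innermost base of the reference, e.g. a MEM_REF,
     TARGET_MEM_REF or a bare decl below a chain of COMPONENT_REFs and
     ARRAY_REFs.  */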
  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (val))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (val, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    tree fndecl;

		    if (targets.length () == 1)
		      fndecl = targets[0]->decl;
		    else
		      fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "__builtin_unreachable");
		      }
		    val = fold_convert (TREE_TYPE (val), fndecl);
		    STRIP_USELESS_TYPE_CONVERSION (val);
		    return val;
		  }
	      }
	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */
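/* A sketch of that contract, assuming the replaced call carried
   "# .MEM_3 = VDEF <.MEM_1>": the first memory access in the new sequence
   uses .MEM_1 as its VUSE, the last store defines .MEM_3, and stores in
   between are chained through fresh SSA names of the virtual operand.  */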
void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
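/* For example, with TYPE == 2 and ARG defined by "n_1 = PHI <16, 32>",
   both PHI arguments are non-negative integer constants, so *LENGTH is
   set to the maximum value 32.  */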
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}

/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
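/* For instance, strcpy (dst, src) with a source length known through
   get_maxval_strlen below is rewritten in terms of memcpy by
   fold_builtin_strcpy.  */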
tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRCAT:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }
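  /* ARG_IDX is the index of the argument whose string length or value the
     fold depends on; TYPE tells get_maxval_strlen what to compute for it
     (0: string length, 1: maximum string length, 2: maximum value).  */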
  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	    fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_STRCAT:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcat (loc, gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2], ignore,
					   DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */
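/* For example, once inlining has exposed the dynamic type of an object,
   an OBJ_TYPE_REF callee wrapping a known function address can be
   stripped to a direct call, and with -fdevirtualize the complete set of
   possible targets of a still-polymorphic call is queried via
   possible_polymorphic_call_targets.  */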
static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, cgraph_get_node (gimple_call_addr_fndecl
						(OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		}
	      else
		{
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    gsi_replace (gsi, new_stmt, true);
		  return true;
		}
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
      else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
	changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR
		     ? build_zero_cst (TREE_TYPE (arg0))
		     : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR
		     ? build_zero_cst (TREE_TYPE (arg0))
		     : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = build_zero_cst (TREE_TYPE (arg0));
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR)
	    {
	      if (integer_onep (arg1))
		result = arg0;
	      else if (integer_onep (arg0))
		result = arg1;
	    }
	}
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}

/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */
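/* A typical caller pattern (a sketch; the propagation engines in
   tree-ssa-propagate.c are representative users):

     gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     if (fold_stmt (&gsi))
       update_stmt (gsi_stmt (gsi));  */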
bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}

/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}

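/* For example, for an SSA name X_1, canonicalize_bool (x_1, true) yields
   the boolean-typed comparison "x_1 == 0", while canonicalize_bool
   (x_1, false) yields x_1 itself if it already has boolean type and
   "x_1 != 0" otherwise.  */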
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}

/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}

/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);

/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}

/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}

1784 /* Try to simplify the AND of two comparisons defined by
1785 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1786 If this can be done without constructing an intermediate value,
1787 return the resulting tree; otherwise NULL_TREE is returned.
1788 This function is deliberately asymmetric as it recurses on SSA_DEFs
1789 in the first comparison but not the second. */
1791 static tree
1792 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1793 enum tree_code code2, tree op2a, tree op2b)
1795 tree truth_type = truth_type_for (TREE_TYPE (op1a));
1797 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1798 if (operand_equal_p (op1a, op2a, 0)
1799 && operand_equal_p (op1b, op2b, 0))
1801 /* Result will be either NULL_TREE, or a combined comparison. */
1802 tree t = combine_comparisons (UNKNOWN_LOCATION,
1803 TRUTH_ANDIF_EXPR, code1, code2,
1804 truth_type, op1a, op1b);
1805 if (t)
1806 return t;
1809 /* Likewise the swapped case of the above. */
1810 if (operand_equal_p (op1a, op2b, 0)
1811 && operand_equal_p (op1b, op2a, 0))
1813 /* Result will be either NULL_TREE, or a combined comparison. */
1814 tree t = combine_comparisons (UNKNOWN_LOCATION,
1815 TRUTH_ANDIF_EXPR, code1,
1816 swap_tree_comparison (code2),
1817 truth_type, op1a, op1b);
1818 if (t)
1819 return t;
1822 /* If both comparisons are of the same value against constants, we might
1823 be able to merge them. */
1824 if (operand_equal_p (op1a, op2a, 0)
1825 && TREE_CODE (op1b) == INTEGER_CST
1826 && TREE_CODE (op2b) == INTEGER_CST)
1828 int cmp = tree_int_cst_compare (op1b, op2b);
1830 /* If we have (op1a == op1b), we should either be able to
1831 return that or FALSE, depending on whether the constant op1b
1832 also satisfies the other comparison against op2b. */
1833 if (code1 == EQ_EXPR)
1835 bool done = true;
1836 bool val;
1837 switch (code2)
1839 case EQ_EXPR: val = (cmp == 0); break;
1840 case NE_EXPR: val = (cmp != 0); break;
1841 case LT_EXPR: val = (cmp < 0); break;
1842 case GT_EXPR: val = (cmp > 0); break;
1843 case LE_EXPR: val = (cmp <= 0); break;
1844 case GE_EXPR: val = (cmp >= 0); break;
1845 default: done = false;
1847 if (done)
1849 if (val)
1850 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1851 else
1852 return boolean_false_node;
1855 /* Likewise if the second comparison is an == comparison. */
1856 else if (code2 == EQ_EXPR)
1858 bool done = true;
1859 bool val;
1860 switch (code1)
1862 case EQ_EXPR: val = (cmp == 0); break;
1863 case NE_EXPR: val = (cmp != 0); break;
1864 case LT_EXPR: val = (cmp > 0); break;
1865 case GT_EXPR: val = (cmp < 0); break;
1866 case LE_EXPR: val = (cmp >= 0); break;
1867 case GE_EXPR: val = (cmp <= 0); break;
1868 default: done = false;
1870 if (done)
1872 if (val)
1873 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1874 else
1875 return boolean_false_node;
1879 /* Same business with inequality tests. */
1880 else if (code1 == NE_EXPR)
1882 bool val;
1883 switch (code2)
1885 case EQ_EXPR: val = (cmp != 0); break;
1886 case NE_EXPR: val = (cmp == 0); break;
1887 case LT_EXPR: val = (cmp >= 0); break;
1888 case GT_EXPR: val = (cmp <= 0); break;
1889 case LE_EXPR: val = (cmp > 0); break;
1890 case GE_EXPR: val = (cmp < 0); break;
1891 default:
1892 val = false;
1894 if (val)
1895 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1897 else if (code2 == NE_EXPR)
1899 bool val;
1900 switch (code1)
1902 case EQ_EXPR: val = (cmp == 0); break;
1903 case NE_EXPR: val = (cmp != 0); break;
1904 case LT_EXPR: val = (cmp <= 0); break;
1905 case GT_EXPR: val = (cmp >= 0); break;
1906 case LE_EXPR: val = (cmp < 0); break;
1907 case GE_EXPR: val = (cmp > 0); break;
1908 default:
1909 val = false;
1911 if (val)
1912 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1915 /* Choose the more restrictive of two < or <= comparisons. */
1916 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1917 && (code2 == LT_EXPR || code2 == LE_EXPR))
1919 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1920 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1921 else
1922 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1925 /* Likewise choose the more restrictive of two > or >= comparisons. */
1926 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1927 && (code2 == GT_EXPR || code2 == GE_EXPR))
1929 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1930 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1931 else
1932 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1935 /* Check for singleton ranges. */
1936 else if (cmp == 0
1937 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1938 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1939 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1941 /* Check for disjoint ranges. */
1942 else if (cmp <= 0
1943 && (code1 == LT_EXPR || code1 == LE_EXPR)
1944 && (code2 == GT_EXPR || code2 == GE_EXPR))
1945 return boolean_false_node;
1946 else if (cmp >= 0
1947 && (code1 == GT_EXPR || code1 == GE_EXPR)
1948 && (code2 == LT_EXPR || code2 == LE_EXPR))
1949 return boolean_false_node;
1952 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1953 NAME's definition is a truth value. See if there are any simplifications
1954 that can be done against the NAME's definition. */
1955 if (TREE_CODE (op1a) == SSA_NAME
1956 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1957 && (integer_zerop (op1b) || integer_onep (op1b)))
1959 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1960 || (code1 == NE_EXPR && integer_onep (op1b)));
1961 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1962 switch (gimple_code (stmt))
1964 case GIMPLE_ASSIGN:
1965 /* Try to simplify by copy-propagating the definition. */
1966 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1968 case GIMPLE_PHI:
1969 /* If every argument to the PHI produces the same result when
1970 ANDed with the second comparison, we win.
1971 Do not do this unless the type is bool since we need a bool
1972 result here anyway. */
1973 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1975 tree result = NULL_TREE;
1976 unsigned i;
1977 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1979 tree arg = gimple_phi_arg_def (stmt, i);
1981 /* If this PHI has itself as an argument, ignore it.
1982 If all the other args produce the same result,
1983 we're still OK. */
1984 if (arg == gimple_phi_result (stmt))
1985 continue;
1986 else if (TREE_CODE (arg) == INTEGER_CST)
1988 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1990 if (!result)
1991 result = boolean_false_node;
1992 else if (!integer_zerop (result))
1993 return NULL_TREE;
1995 else if (!result)
1996 result = fold_build2 (code2, boolean_type_node,
1997 op2a, op2b);
1998 else if (!same_bool_comparison_p (result,
1999 code2, op2a, op2b))
2000 return NULL_TREE;
2002 else if (TREE_CODE (arg) == SSA_NAME
2003 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2005 tree temp;
2006 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2007 /* In simple cases we can look through PHI nodes,
2008 but we have to be careful with loops.
2009 See PR49073. */
2010 if (! dom_info_available_p (CDI_DOMINATORS)
2011 || gimple_bb (def_stmt) == gimple_bb (stmt)
2012 || dominated_by_p (CDI_DOMINATORS,
2013 gimple_bb (def_stmt),
2014 gimple_bb (stmt)))
2015 return NULL_TREE;
2016 temp = and_var_with_comparison (arg, invert, code2,
2017 op2a, op2b);
2018 if (!temp)
2019 return NULL_TREE;
2020 else if (!result)
2021 result = temp;
2022 else if (!same_bool_result_p (result, temp))
2023 return NULL_TREE;
2025 else
2026 return NULL_TREE;
2028 return result;
2031 default:
2032 break;
2035 return NULL_TREE;
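/* A minimal stand-alone sketch of the constant-merge logic above: every
   case reduces to a three-way comparison of the two INTEGER_CSTs.  Shown
   here for the (x == c1) && (x < c2) row only, assuming plain ints stand
   in for the constant trees:  */

static int
and_eq_lt_folds (int c1, int c2, int *folds_to_false)
{
  /* cmp mirrors tree_int_cst_compare (op1b, op2b).  */
  int cmp = (c1 < c2) ? -1 : (c1 > c2) ? 1 : 0;
  /* EQ_EXPR/LT_EXPR entry of the switch: val = (cmp < 0).  If val,
     keep (x == c1); otherwise the conjunction is statically false.  */
  int val = cmp < 0;
  *folds_to_false = !val;
  return val;
}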
2038 /* Try to simplify the AND of two comparisons, specified by
2039 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2040 If this can be simplified to a single expression (without requiring
2041 introducing more SSA variables to hold intermediate values),
2042 return the resulting tree. Otherwise return NULL_TREE.
2043 If the result expression is non-null, it has boolean type. */
2045 tree
2046 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2047 enum tree_code code2, tree op2a, tree op2b)
2049 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2050 if (t)
2051 return t;
2052 else
2053 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2056 /* Helper function for or_comparisons_1: try to simplify the OR of the
2057 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2058 If INVERT is true, invert the value of VAR before doing the OR.
2059 Return NULL_TREE if we can't simplify this to a single expression. */
2061 static tree
2062 or_var_with_comparison (tree var, bool invert,
2063 enum tree_code code2, tree op2a, tree op2b)
2065 tree t;
2066 gimple stmt = SSA_NAME_DEF_STMT (var);
2068 /* We can only deal with variables whose definitions are assignments. */
2069 if (!is_gimple_assign (stmt))
2070 return NULL_TREE;
2072 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2073 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2074 Then we only have to consider the simpler non-inverted cases. */
2075 if (invert)
2076 t = and_var_with_comparison_1 (stmt,
2077 invert_tree_comparison (code2, false),
2078 op2a, op2b);
2079 else
2080 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2081 return canonicalize_bool (t, invert);
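/* The inversion above is just DeMorgan's law on truth values:
   !var OR p == !(var AND !p), with p standing for (op2a code2 op2b).
   A minimal sketch over C ints:  */

static int
demorgan_or_holds (int var, int p)
{
  return (!var || p) == !(var && !p);   /* Always evaluates to 1.  */
}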
2084 /* Try to simplify the OR of the ssa variable defined by the assignment
2085 STMT with the comparison specified by (OP2A CODE2 OP2B).
2086 Return NULL_TREE if we can't simplify this to a single expression. */
2088 static tree
2089 or_var_with_comparison_1 (gimple stmt,
2090 enum tree_code code2, tree op2a, tree op2b)
2092 tree var = gimple_assign_lhs (stmt);
2093 tree true_test_var = NULL_TREE;
2094 tree false_test_var = NULL_TREE;
2095 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2097 /* Check for identities like (var OR (var != 0)) => true. */
2098 if (TREE_CODE (op2a) == SSA_NAME
2099 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2101 if ((code2 == NE_EXPR && integer_zerop (op2b))
2102 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2104 true_test_var = op2a;
2105 if (var == true_test_var)
2106 return var;
2108 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2109 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2111 false_test_var = op2a;
2112 if (var == false_test_var)
2113 return boolean_true_node;
2117 /* If the definition is a comparison, recurse on it. */
2118 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2120 tree t = or_comparisons_1 (innercode,
2121 gimple_assign_rhs1 (stmt),
2122 gimple_assign_rhs2 (stmt),
2123 code2,
2124 op2a,
2125 op2b);
2126 if (t)
2127 return t;
2130 /* If the definition is an AND or OR expression, we may be able to
2131 simplify by reassociating. */
2132 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2133 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2135 tree inner1 = gimple_assign_rhs1 (stmt);
2136 tree inner2 = gimple_assign_rhs2 (stmt);
2137 gimple s;
2138 tree t;
2139 tree partial = NULL_TREE;
2140 bool is_or = (innercode == BIT_IOR_EXPR);
2142 /* Check for boolean identities that don't require recursive examination
2143 of inner1/inner2:
2144 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2145 inner1 OR (inner1 AND inner2) => inner1
2146 !inner1 OR (inner1 OR inner2) => true
2147 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2149 if (inner1 == true_test_var)
2150 return (is_or ? var : inner1);
2151 else if (inner2 == true_test_var)
2152 return (is_or ? var : inner2);
2153 else if (inner1 == false_test_var)
2154 return (is_or
2155 ? boolean_true_node
2156 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2157 else if (inner2 == false_test_var)
2158 return (is_or
2159 ? boolean_true_node
2160 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2162 /* Next, redistribute/reassociate the OR across the inner tests.
2163 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2164 if (TREE_CODE (inner1) == SSA_NAME
2165 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2166 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2167 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2168 gimple_assign_rhs1 (s),
2169 gimple_assign_rhs2 (s),
2170 code2, op2a, op2b)))
2172 /* Handle the OR case, where we are reassociating:
2173 (inner1 OR inner2) OR (op2a code2 op2b)
2174 => (t OR inner2)
2175 If the partial result t is a constant, we win. Otherwise
2176 continue on to try reassociating with the other inner test. */
2177 if (is_or)
2179 if (integer_onep (t))
2180 return boolean_true_node;
2181 else if (integer_zerop (t))
2182 return inner2;
2185 /* Handle the AND case, where we are redistributing:
2186 (inner1 AND inner2) OR (op2a code2 op2b)
2187 => (t AND (inner2 OR (op2a code op2b))) */
2188 else if (integer_zerop (t))
2189 return boolean_false_node;
2191 /* Save partial result for later. */
2192 partial = t;
2195 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2196 if (TREE_CODE (inner2) == SSA_NAME
2197 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2198 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2199 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2200 gimple_assign_rhs1 (s),
2201 gimple_assign_rhs2 (s),
2202 code2, op2a, op2b)))
2204 /* Handle the OR case, where we are reassociating:
2205 (inner1 OR inner2) OR (op2a code2 op2b)
2206 => (inner1 OR t)
2207 => (t OR partial) */
2208 if (is_or)
2210 if (integer_zerop (t))
2211 return inner1;
2212 else if (integer_onep (t))
2213 return boolean_true_node;
2214 /* If both are the same, we can apply the identity
2215 (x OR x) == x. */
2216 else if (partial && same_bool_result_p (t, partial))
2217 return t;
2220 /* Handle the AND case, where we are redistributing:
2221 (inner1 AND inner2) OR (op2a code2 op2b)
2222 => (t AND (inner1 OR (op2a code2 op2b)))
2223 => (t AND partial) */
2224 else
2226 if (integer_zerop (t))
2227 return boolean_false_node;
2228 else if (partial)
2230 /* We already got a simplification for the other
2231 operand to the redistributed AND expression. The
2232 interesting case is when at least one is true.
2233 Or, if both are the same, we can apply the identity
2234 (x AND x) == x. */
2235 if (integer_onep (partial))
2236 return t;
2237 else if (integer_onep (t))
2238 return partial;
2239 else if (same_bool_result_p (t, partial))
2240 return t;
2245 return NULL_TREE;
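/* The short-circuit identities used above are idempotence and absorption.
   A stand-alone check over C ints, with a and b standing for inner1 and
   inner2:  */

static int
or_identities_hold (int a, int b)
{
  int idempotence = ((a || (a || b)) == (a || b));   /* a OR (a OR b) */
  int absorption = ((a || (a && b)) == !!a);         /* a OR (a AND b) */
  return idempotence && absorption;                  /* Always 1.  */
}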
2248 /* Try to simplify the OR of two comparisons defined by
2249 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2250 If this can be done without constructing an intermediate value,
2251 return the resulting tree; otherwise NULL_TREE is returned.
2252 This function is deliberately asymmetric as it recurses on SSA_DEFs
2253 in the first comparison but not the second. */
2255 static tree
2256 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2257 enum tree_code code2, tree op2a, tree op2b)
2259 tree truth_type = truth_type_for (TREE_TYPE (op1a));
2261 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2262 if (operand_equal_p (op1a, op2a, 0)
2263 && operand_equal_p (op1b, op2b, 0))
2265 /* Result will be either NULL_TREE, or a combined comparison. */
2266 tree t = combine_comparisons (UNKNOWN_LOCATION,
2267 TRUTH_ORIF_EXPR, code1, code2,
2268 truth_type, op1a, op1b);
2269 if (t)
2270 return t;
2273 /* Likewise the swapped case of the above. */
2274 if (operand_equal_p (op1a, op2b, 0)
2275 && operand_equal_p (op1b, op2a, 0))
2277 /* Result will be either NULL_TREE, or a combined comparison. */
2278 tree t = combine_comparisons (UNKNOWN_LOCATION,
2279 TRUTH_ORIF_EXPR, code1,
2280 swap_tree_comparison (code2),
2281 truth_type, op1a, op1b);
2282 if (t)
2283 return t;
2286 /* If both comparisons are of the same value against constants, we might
2287 be able to merge them. */
2288 if (operand_equal_p (op1a, op2a, 0)
2289 && TREE_CODE (op1b) == INTEGER_CST
2290 && TREE_CODE (op2b) == INTEGER_CST)
2292 int cmp = tree_int_cst_compare (op1b, op2b);
2294 /* If we have (op1a != op1b), we should either be able to
2295 return that or TRUE, depending on whether the constant op1b
2296 also satisfies the other comparison against op2b. */
2297 if (code1 == NE_EXPR)
2299 bool done = true;
2300 bool val;
2301 switch (code2)
2303 case EQ_EXPR: val = (cmp == 0); break;
2304 case NE_EXPR: val = (cmp != 0); break;
2305 case LT_EXPR: val = (cmp < 0); break;
2306 case GT_EXPR: val = (cmp > 0); break;
2307 case LE_EXPR: val = (cmp <= 0); break;
2308 case GE_EXPR: val = (cmp >= 0); break;
2309 default: done = false;
2311 if (done)
2313 if (val)
2314 return boolean_true_node;
2315 else
2316 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2319 /* Likewise if the second comparison is a != comparison. */
2320 else if (code2 == NE_EXPR)
2322 bool done = true;
2323 bool val;
2324 switch (code1)
2326 case EQ_EXPR: val = (cmp == 0); break;
2327 case NE_EXPR: val = (cmp != 0); break;
2328 case LT_EXPR: val = (cmp > 0); break;
2329 case GT_EXPR: val = (cmp < 0); break;
2330 case LE_EXPR: val = (cmp >= 0); break;
2331 case GE_EXPR: val = (cmp <= 0); break;
2332 default: done = false;
2334 if (done)
2336 if (val)
2337 return boolean_true_node;
2338 else
2339 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2343 /* See if an equality test is redundant with the other comparison. */
2344 else if (code1 == EQ_EXPR)
2346 bool val;
2347 switch (code2)
2349 case EQ_EXPR: val = (cmp == 0); break;
2350 case NE_EXPR: val = (cmp != 0); break;
2351 case LT_EXPR: val = (cmp < 0); break;
2352 case GT_EXPR: val = (cmp > 0); break;
2353 case LE_EXPR: val = (cmp <= 0); break;
2354 case GE_EXPR: val = (cmp >= 0); break;
2355 default:
2356 val = false;
2358 if (val)
2359 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2361 else if (code2 == EQ_EXPR)
2363 bool val;
2364 switch (code1)
2366 case EQ_EXPR: val = (cmp == 0); break;
2367 case NE_EXPR: val = (cmp != 0); break;
2368 case LT_EXPR: val = (cmp > 0); break;
2369 case GT_EXPR: val = (cmp < 0); break;
2370 case LE_EXPR: val = (cmp >= 0); break;
2371 case GE_EXPR: val = (cmp <= 0); break;
2372 default:
2373 val = false;
2375 if (val)
2376 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2379 /* Choose the less restrictive of two < or <= comparisons. */
2380 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2381 && (code2 == LT_EXPR || code2 == LE_EXPR))
2383 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2384 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2385 else
2386 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2389 /* Likewise choose the less restrictive of two > or >= comparisons. */
2390 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2391 && (code2 == GT_EXPR || code2 == GE_EXPR))
2393 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2394 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2395 else
2396 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2399 /* Check for singleton ranges. */
2400 else if (cmp == 0
2401 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2402 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2403 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2405 /* Check for less/greater pairs that don't restrict the range at all. */
2406 else if (cmp >= 0
2407 && (code1 == LT_EXPR || code1 == LE_EXPR)
2408 && (code2 == GT_EXPR || code2 == GE_EXPR))
2409 return boolean_true_node;
2410 else if (cmp <= 0
2411 && (code1 == GT_EXPR || code1 == GE_EXPR)
2412 && (code2 == LT_EXPR || code2 == LE_EXPR))
2413 return boolean_true_node;
2416 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2417 NAME's definition is a truth value. See if there are any simplifications
2418 that can be done against the NAME's definition. */
2419 if (TREE_CODE (op1a) == SSA_NAME
2420 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2421 && (integer_zerop (op1b) || integer_onep (op1b)))
2423 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2424 || (code1 == NE_EXPR && integer_onep (op1b)));
2425 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2426 switch (gimple_code (stmt))
2428 case GIMPLE_ASSIGN:
2429 /* Try to simplify by copy-propagating the definition. */
2430 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2432 case GIMPLE_PHI:
2433 /* If every argument to the PHI produces the same result when
2434 ORed with the second comparison, we win.
2435 Do not do this unless the type is bool since we need a bool
2436 result here anyway. */
2437 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2439 tree result = NULL_TREE;
2440 unsigned i;
2441 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2443 tree arg = gimple_phi_arg_def (stmt, i);
2445 /* If this PHI has itself as an argument, ignore it.
2446 If all the other args produce the same result,
2447 we're still OK. */
2448 if (arg == gimple_phi_result (stmt))
2449 continue;
2450 else if (TREE_CODE (arg) == INTEGER_CST)
2452 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2454 if (!result)
2455 result = boolean_true_node;
2456 else if (!integer_onep (result))
2457 return NULL_TREE;
2459 else if (!result)
2460 result = fold_build2 (code2, boolean_type_node,
2461 op2a, op2b);
2462 else if (!same_bool_comparison_p (result,
2463 code2, op2a, op2b))
2464 return NULL_TREE;
2466 else if (TREE_CODE (arg) == SSA_NAME
2467 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2469 tree temp;
2470 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2471 /* In simple cases we can look through PHI nodes,
2472 but we have to be careful with loops.
2473 See PR49073. */
2474 if (! dom_info_available_p (CDI_DOMINATORS)
2475 || gimple_bb (def_stmt) == gimple_bb (stmt)
2476 || dominated_by_p (CDI_DOMINATORS,
2477 gimple_bb (def_stmt),
2478 gimple_bb (stmt)))
2479 return NULL_TREE;
2480 temp = or_var_with_comparison (arg, invert, code2,
2481 op2a, op2b);
2482 if (!temp)
2483 return NULL_TREE;
2484 else if (!result)
2485 result = temp;
2486 else if (!same_bool_result_p (result, temp))
2487 return NULL_TREE;
2489 else
2490 return NULL_TREE;
2492 return result;
2495 default:
2496 break;
2499 return NULL_TREE;
2502 /* Try to simplify the OR of two comparisons, specified by
2503 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2504 If this can be simplified to a single expression (without requiring
2505 introducing more SSA variables to hold intermediate values),
2506 return the resulting tree. Otherwise return NULL_TREE.
2507 If the result expression is non-null, it has boolean type. */
2509 tree
2510 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2511 enum tree_code code2, tree op2a, tree op2b)
2513 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2514 if (t)
2515 return t;
2516 else
2517 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
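/* Note that both maybe_fold_and_comparisons and maybe_fold_or_comparisons
   rely on AND/OR being commutative: the worker only looks through SSA
   definitions reachable from its first comparison, so each wrapper simply
   retries with the two comparisons swapped before giving up.  */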
2521 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2523 Either NULL_TREE, a simplified but non-constant expression, or a constant
2524 is returned.
2526 ??? This should go into a gimple-fold-inline.h file to be eventually
2527 privatized with the single valueize function used in the various TUs
2528 to avoid the indirect function call overhead. */
2530 tree
2531 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2533 location_t loc = gimple_location (stmt);
2534 switch (gimple_code (stmt))
2536 case GIMPLE_ASSIGN:
2538 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2540 switch (get_gimple_rhs_class (subcode))
2542 case GIMPLE_SINGLE_RHS:
2544 tree rhs = gimple_assign_rhs1 (stmt);
2545 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2547 if (TREE_CODE (rhs) == SSA_NAME)
2549 /* If the RHS is an SSA_NAME, return its known constant value,
2550 if any. */
2551 return (*valueize) (rhs);
2553 /* Handle propagating invariant addresses into address
2554 operations. */
2555 else if (TREE_CODE (rhs) == ADDR_EXPR
2556 && !is_gimple_min_invariant (rhs))
2558 HOST_WIDE_INT offset = 0;
2559 tree base;
2560 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2561 &offset,
2562 valueize);
2563 if (base
2564 && (CONSTANT_CLASS_P (base)
2565 || decl_address_invariant_p (base)))
2566 return build_invariant_address (TREE_TYPE (rhs),
2567 base, offset);
2569 else if (TREE_CODE (rhs) == CONSTRUCTOR
2570 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2571 && (CONSTRUCTOR_NELTS (rhs)
2572 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2574 unsigned i;
2575 tree val, *vec;
2577 vec = XALLOCAVEC (tree,
2578 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2579 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2581 val = (*valueize) (val);
2582 if (TREE_CODE (val) == INTEGER_CST
2583 || TREE_CODE (val) == REAL_CST
2584 || TREE_CODE (val) == FIXED_CST)
2585 vec[i] = val;
2586 else
2587 return NULL_TREE;
2590 return build_vector (TREE_TYPE (rhs), vec);
2592 if (subcode == OBJ_TYPE_REF)
2594 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
2595 /* If callee is constant, we can fold away the wrapper. */
2596 if (is_gimple_min_invariant (val))
2597 return val;
2600 if (kind == tcc_reference)
2602 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2603 || TREE_CODE (rhs) == REALPART_EXPR
2604 || TREE_CODE (rhs) == IMAGPART_EXPR)
2605 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2607 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2608 return fold_unary_loc (EXPR_LOCATION (rhs),
2609 TREE_CODE (rhs),
2610 TREE_TYPE (rhs), val);
2612 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2613 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2615 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2616 return fold_ternary_loc (EXPR_LOCATION (rhs),
2617 TREE_CODE (rhs),
2618 TREE_TYPE (rhs), val,
2619 TREE_OPERAND (rhs, 1),
2620 TREE_OPERAND (rhs, 2));
2622 else if (TREE_CODE (rhs) == MEM_REF
2623 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2625 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2626 if (TREE_CODE (val) == ADDR_EXPR
2627 && is_gimple_min_invariant (val))
2629 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2630 unshare_expr (val),
2631 TREE_OPERAND (rhs, 1));
2632 if (tem)
2633 rhs = tem;
2636 return fold_const_aggregate_ref_1 (rhs, valueize);
2638 else if (kind == tcc_declaration)
2639 return get_symbol_constant_value (rhs);
2640 return rhs;
2643 case GIMPLE_UNARY_RHS:
2645 /* Handle unary operators that can appear in GIMPLE form.
2646 Note that we know the single operand must be a constant,
2647 so this should almost always return a simplified RHS. */
2648 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2650 return
2651 fold_unary_ignore_overflow_loc (loc, subcode,
2652 gimple_expr_type (stmt), op0);
2655 case GIMPLE_BINARY_RHS:
2657 /* Handle binary operators that can appear in GIMPLE form. */
2658 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2659 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2661 /* Translate &x + CST into an invariant form suitable for
2662 further propagation. */
2663 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2664 && TREE_CODE (op0) == ADDR_EXPR
2665 && TREE_CODE (op1) == INTEGER_CST)
2667 tree off = fold_convert (ptr_type_node, op1);
2668 return build_fold_addr_expr_loc
2669 (loc,
2670 fold_build2 (MEM_REF,
2671 TREE_TYPE (TREE_TYPE (op0)),
2672 unshare_expr (op0), off));
2675 return fold_binary_loc (loc, subcode,
2676 gimple_expr_type (stmt), op0, op1);
2679 case GIMPLE_TERNARY_RHS:
2681 /* Handle ternary operators that can appear in GIMPLE form. */
2682 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2683 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2684 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2686 /* Fold embedded expressions in ternary codes. */
2687 if ((subcode == COND_EXPR
2688 || subcode == VEC_COND_EXPR)
2689 && COMPARISON_CLASS_P (op0))
2691 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2692 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2693 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2694 TREE_TYPE (op0), op00, op01);
2695 if (tem)
2696 op0 = tem;
2699 return fold_ternary_loc (loc, subcode,
2700 gimple_expr_type (stmt), op0, op1, op2);
2703 default:
2704 gcc_unreachable ();
2708 case GIMPLE_CALL:
2710 tree fn;
2712 if (gimple_call_internal_p (stmt))
2714 enum tree_code subcode = ERROR_MARK;
2715 switch (gimple_call_internal_fn (stmt))
2717 case IFN_UBSAN_CHECK_ADD:
2718 subcode = PLUS_EXPR;
2719 break;
2720 case IFN_UBSAN_CHECK_SUB:
2721 subcode = MINUS_EXPR;
2722 break;
2723 case IFN_UBSAN_CHECK_MUL:
2724 subcode = MULT_EXPR;
2725 break;
2726 default:
2727 return NULL_TREE;
2729 tree arg0 = gimple_call_arg (stmt, 0);
2730 tree arg1 = gimple_call_arg (stmt, 1);
2731 tree op0 = (*valueize) (arg0);
2732 tree op1 = (*valueize) (arg1);
2734 if (TREE_CODE (op0) != INTEGER_CST
2735 || TREE_CODE (op1) != INTEGER_CST)
2737 switch (subcode)
2739 case MULT_EXPR:
2740 /* x * 0 = 0 * x = 0 without overflow. */
2741 if (integer_zerop (op0) || integer_zerop (op1))
2742 return build_zero_cst (TREE_TYPE (arg0));
2743 break;
2744 case MINUS_EXPR:
2745 /* y - y = 0 without overflow. */
2746 if (operand_equal_p (op0, op1, 0))
2747 return build_zero_cst (TREE_TYPE (arg0));
2748 break;
2749 default:
2750 break;
2753 tree res
2754 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
2755 if (res
2756 && TREE_CODE (res) == INTEGER_CST
2757 && !TREE_OVERFLOW (res))
2758 return res;
2759 return NULL_TREE;
2762 fn = (*valueize) (gimple_call_fn (stmt));
2763 if (TREE_CODE (fn) == ADDR_EXPR
2764 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2765 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
2766 && gimple_builtin_call_types_compatible_p (stmt,
2767 TREE_OPERAND (fn, 0)))
2769 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2770 tree call, retval;
2771 unsigned i;
2772 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2773 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2774 call = build_call_array_loc (loc,
2775 gimple_call_return_type (stmt),
2776 fn, gimple_call_num_args (stmt), args);
2777 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2778 if (retval)
2780 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2781 STRIP_NOPS (retval);
2782 retval = fold_convert (gimple_call_return_type (stmt), retval);
2784 return retval;
2786 return NULL_TREE;
2789 default:
2790 return NULL_TREE;
2794 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2795 Returns NULL_TREE if folding to a constant is not possible, otherwise
2796 returns a constant according to is_gimple_min_invariant. */
2798 tree
2799 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2801 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2802 if (res && is_gimple_min_invariant (res))
2803 return res;
2804 return NULL_TREE;
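/* A minimal stand-alone model of the valueization contract used above,
   with ints standing in for trees, a hypothetical callback in place of
   the SSA lattice lookup, and addition standing in for the statement
   being folded:  */

static int
fold_add_stmt (int (*valueize_cst) (int op, int *cst), int op0, int op1,
	       int *res)
{
  int c0, c1;
  /* Fold only if both operands valueize to known constants.  */
  if (!valueize_cst (op0, &c0) || !valueize_cst (op1, &c1))
    return 0;
  *res = c0 + c1;
  return 1;
}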
2808 /* The following set of functions is supposed to fold references using
2809 their constant initializers. */
2811 static tree fold_ctor_reference (tree type, tree ctor,
2812 unsigned HOST_WIDE_INT offset,
2813 unsigned HOST_WIDE_INT size, tree);
2815 /* See if we can find the constructor defining the value of BASE.
2816 When we know the constructor with a constant offset (such as when
2817 BASE is array[40] and we know the constructor of the array), then
2818 BIT_OFFSET is adjusted accordingly.
2820 As a special case, return error_mark_node when constructor
2821 is not explicitly available, but it is known to be zero
2822 such as 'static const int a;'. */
2823 static tree
2824 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2825 tree (*valueize)(tree))
2827 HOST_WIDE_INT bit_offset2, size, max_size;
2828 if (TREE_CODE (base) == MEM_REF)
2830 if (!integer_zerop (TREE_OPERAND (base, 1)))
2832 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
2833 return NULL_TREE;
2834 *bit_offset += (mem_ref_offset (base).to_short_addr ()
2835 * BITS_PER_UNIT);
2838 if (valueize
2839 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2840 base = valueize (TREE_OPERAND (base, 0));
2841 if (!base || TREE_CODE (base) != ADDR_EXPR)
2842 return NULL_TREE;
2843 base = TREE_OPERAND (base, 0);
2846 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2847 DECL_INITIAL. If BASE is a nested reference into another
2848 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2849 the inner reference. */
2850 switch (TREE_CODE (base))
2852 case VAR_DECL:
2853 case CONST_DECL:
2855 tree init = ctor_for_folding (base);
2857 /* Our semantics are the exact opposite of ctor_for_folding:
2858 NULL means unknown, while error_mark_node means 0. */
2859 if (init == error_mark_node)
2860 return NULL_TREE;
2861 if (!init)
2862 return error_mark_node;
2863 return init;
2866 case ARRAY_REF:
2867 case COMPONENT_REF:
2868 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2869 if (max_size == -1 || size != max_size)
2870 return NULL_TREE;
2871 *bit_offset += bit_offset2;
2872 return get_base_constructor (base, bit_offset, valueize);
2874 case STRING_CST:
2875 case CONSTRUCTOR:
2876 return base;
2878 default:
2879 return NULL_TREE;
2883 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
2884 to the memory at bit OFFSET.
2886 We only do the simple job of folding byte accesses. */
2888 static tree
2889 fold_string_cst_ctor_reference (tree type, tree ctor,
2890 unsigned HOST_WIDE_INT offset,
2891 unsigned HOST_WIDE_INT size)
2893 if (INTEGRAL_TYPE_P (type)
2894 && (TYPE_MODE (type)
2895 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2896 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2897 == MODE_INT)
2898 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2899 && size == BITS_PER_UNIT
2900 && !(offset % BITS_PER_UNIT))
2902 offset /= BITS_PER_UNIT;
2903 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2904 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2905 [offset]));
2906 /* Folding
2907 const char a[20]="hello";
2908 return a[10];
2910 might lead to an offset greater than the string length. In this case we
2911 know the value is either initialized to 0 or out of bounds. Return 0
2912 in both cases. */
2913 return build_zero_cst (type);
2915 return NULL_TREE;
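/* The rule above at the C level: a byte access past the STRING_CST's
   length but still inside the object reads zero fill.  A stand-alone
   sketch, assuming 8-bit bytes:  */

static int
read_string_byte (const char *str, unsigned len, unsigned idx)
{
  /* idx < len: initialized character; otherwise the zero-initialized
     tail (or an out-of-bounds read, which also folds to 0).  */
  return idx < len ? (unsigned char) str[idx] : 0;
}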
2918 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE and size
2919 SIZE to the memory at bit OFFSET. */
2921 static tree
2922 fold_array_ctor_reference (tree type, tree ctor,
2923 unsigned HOST_WIDE_INT offset,
2924 unsigned HOST_WIDE_INT size,
2925 tree from_decl)
2927 unsigned HOST_WIDE_INT cnt;
2928 tree cfield, cval;
2929 offset_int low_bound;
2930 offset_int elt_size;
2931 offset_int index, max_index;
2932 offset_int access_index;
2933 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2934 HOST_WIDE_INT inner_offset;
2936 /* Compute low bound and elt size. */
2937 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2938 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2939 if (domain_type && TYPE_MIN_VALUE (domain_type))
2941 /* Static constructors for variably sized objects make no sense. */
2942 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2943 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2944 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
2946 else
2947 low_bound = 0;
2948 /* Static constructors for variably sized objects make no sense. */
2949 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2950 == INTEGER_CST);
2951 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2953 /* We can handle only constant-sized accesses that are known not to
2954 be larger than the size of an array element. */
2955 if (!TYPE_SIZE_UNIT (type)
2956 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2957 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
2958 || elt_size == 0)
2959 return NULL_TREE;
2961 /* Compute the array index we look for. */
2962 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
2963 elt_size);
2964 access_index += low_bound;
2965 if (index_type)
2966 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
2967 TYPE_SIGN (index_type));
2969 /* And offset within the access. */
2970 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2972 /* See if the array field is large enough to span the whole access. We do not
2973 care to fold accesses spanning multiple array indexes. */
2974 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
2975 return NULL_TREE;
2977 index = low_bound - 1;
2978 if (index_type)
2979 index = wi::ext (index, TYPE_PRECISION (index_type),
2980 TYPE_SIGN (index_type));
2982 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2984 /* An array constructor might explicitly set the index, specify a range,
2985 or leave the index NULL, meaning that it is the next index after the
2986 previous one. */
2987 if (cfield)
2989 if (TREE_CODE (cfield) == INTEGER_CST)
2990 max_index = index = wi::to_offset (cfield);
2991 else
2993 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2994 index = wi::to_offset (TREE_OPERAND (cfield, 0));
2995 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
2998 else
3000 index += 1;
3001 if (index_type)
3002 index = wi::ext (index, TYPE_PRECISION (index_type),
3003 TYPE_SIGN (index_type));
3004 max_index = index;
3007 /* Do we have a match? */
3008 if (wi::cmpu (access_index, index) >= 0
3009 && wi::cmpu (access_index, max_index) <= 0)
3010 return fold_ctor_reference (type, cval, inner_offset, size,
3011 from_decl);
3013 /* When memory is not explicitly mentioned in the constructor,
3014 it is 0 (or out of range). */
3015 return build_zero_cst (type);
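/* The index arithmetic used above, modeled with plain unsigned
   arithmetic and BITS_PER_UNIT taken as 8:  */

static void
split_array_access (unsigned long offset_bits, unsigned long elt_size,
		    unsigned long low_bound, unsigned long *access_index,
		    unsigned long *inner_offset)
{
  /* Which element the access starts in ...  */
  *access_index = offset_bits / 8 / elt_size + low_bound;
  /* ... and the bit offset within that element.  */
  *inner_offset = offset_bits % (elt_size * 8);
}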
3018 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
3019 Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET. */
3021 static tree
3022 fold_nonarray_ctor_reference (tree type, tree ctor,
3023 unsigned HOST_WIDE_INT offset,
3024 unsigned HOST_WIDE_INT size,
3025 tree from_decl)
3027 unsigned HOST_WIDE_INT cnt;
3028 tree cfield, cval;
3030 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
3031 cval)
3033 tree byte_offset = DECL_FIELD_OFFSET (cfield);
3034 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
3035 tree field_size = DECL_SIZE (cfield);
3036 offset_int bitoffset;
3037 offset_int bitoffset_end, access_end;
3039 /* Variable-sized objects in static constructors make no sense,
3040 but field_size can be NULL for flexible array members. */
3041 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
3042 && TREE_CODE (byte_offset) == INTEGER_CST
3043 && (field_size != NULL_TREE
3044 ? TREE_CODE (field_size) == INTEGER_CST
3045 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
3047 /* Compute bit offset of the field. */
3048 bitoffset = (wi::to_offset (field_offset)
3049 + wi::lshift (wi::to_offset (byte_offset),
3050 LOG2_BITS_PER_UNIT));
3051 /* Compute bit offset where the field ends. */
3052 if (field_size != NULL_TREE)
3053 bitoffset_end = bitoffset + wi::to_offset (field_size);
3054 else
3055 bitoffset_end = 0;
3057 access_end = offset_int (offset) + size;
3059 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
3060 [BITOFFSET, BITOFFSET_END)? */
3061 if (wi::cmps (access_end, bitoffset) > 0
3062 && (field_size == NULL_TREE
3063 || wi::lts_p (offset, bitoffset_end)))
3065 offset_int inner_offset = offset_int (offset) - bitoffset;
3066 /* We do have overlap. Now see if field is large enough to
3067 cover the access. Give up for accesses spanning multiple
3068 fields. */
3069 if (wi::cmps (access_end, bitoffset_end) > 0)
3070 return NULL_TREE;
3071 if (wi::lts_p (offset, bitoffset))
3072 return NULL_TREE;
3073 return fold_ctor_reference (type, cval,
3074 inner_offset.to_uhwi (), size,
3075 from_decl);
3078 /* When memory is not explicitly mentioned in the constructor, it is 0. */
3079 return build_zero_cst (type);
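/* The overlap and containment tests above, reduced to plain arithmetic
   on bit positions (a sketch; the real code uses offset_int to avoid
   overflow):  */

static int
field_covers_access (long offset, long size, long bitoffset,
		     long bitoffset_end)
{
  long access_end = offset + size;
  /* Overlap test: [offset, access_end) meets [bitoffset, bitoffset_end).  */
  if (access_end <= bitoffset || offset >= bitoffset_end)
    return 0;
  /* The fold proceeds only when the field fully covers the access;
     partial overlap makes the caller give up.  */
  return offset >= bitoffset && access_end <= bitoffset_end;
}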
3082 /* CTOR is a value initializing memory. Fold a reference of type TYPE and size SIZE
3083 to the memory at bit OFFSET. */
3085 static tree
3086 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3087 unsigned HOST_WIDE_INT size, tree from_decl)
3089 tree ret;
3091 /* We found the field with exact match. */
3092 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3093 && !offset)
3094 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
3096 /* We are at the end of the walk; see if we can view-convert the
3097 result. */
3098 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3099 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3100 && operand_equal_p (TYPE_SIZE (type),
3101 TYPE_SIZE (TREE_TYPE (ctor)), 0))
3103 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
3104 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3105 if (ret)
3106 STRIP_NOPS (ret);
3107 return ret;
3109 if (TREE_CODE (ctor) == STRING_CST)
3110 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3111 if (TREE_CODE (ctor) == CONSTRUCTOR)
3114 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
3115 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
3116 return fold_array_ctor_reference (type, ctor, offset, size,
3117 from_decl);
3118 else
3119 return fold_nonarray_ctor_reference (type, ctor, offset, size,
3120 from_decl);
3123 return NULL_TREE;
3126 /* Return the tree representing the element referenced by T if T is an
3127 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
3128 names using VALUEIZE. Return NULL_TREE otherwise. */
3130 tree
3131 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3133 tree ctor, idx, base;
3134 HOST_WIDE_INT offset, size, max_size;
3135 tree tem;
3137 if (TREE_THIS_VOLATILE (t))
3138 return NULL_TREE;
3140 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3141 return get_symbol_constant_value (t);
3143 tem = fold_read_from_constant_string (t);
3144 if (tem)
3145 return tem;
3147 switch (TREE_CODE (t))
3149 case ARRAY_REF:
3150 case ARRAY_RANGE_REF:
3151 /* Constant indexes are handled well by get_base_constructor.
3152 Only special case variable offsets.
3153 FIXME: This code can't handle nested references with variable indexes
3154 (they will be handled only by iteration of ccp). Perhaps we can bring
3155 get_ref_base_and_extent here and make it use a valueize callback. */
3156 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3157 && valueize
3158 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3159 && TREE_CODE (idx) == INTEGER_CST)
3161 tree low_bound, unit_size;
3163 /* If the resulting bit-offset is constant, track it. */
3164 if ((low_bound = array_ref_low_bound (t),
3165 TREE_CODE (low_bound) == INTEGER_CST)
3166 && (unit_size = array_ref_element_size (t),
3167 tree_fits_uhwi_p (unit_size)))
3169 offset_int woffset
3170 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
3171 TYPE_PRECISION (TREE_TYPE (idx)));
3173 if (wi::fits_shwi_p (woffset))
3175 offset = woffset.to_shwi ();
3176 /* TODO: This code seems wrong, multiply then check
3177 to see if it fits. */
3178 offset *= tree_to_uhwi (unit_size);
3179 offset *= BITS_PER_UNIT;
3181 base = TREE_OPERAND (t, 0);
3182 ctor = get_base_constructor (base, &offset, valueize);
3183 /* Empty constructor. Always fold to 0. */
3184 if (ctor == error_mark_node)
3185 return build_zero_cst (TREE_TYPE (t));
3186 /* Out-of-bounds array access. The value is undefined,
3187 but don't fold. */
3188 if (offset < 0)
3189 return NULL_TREE;
3190 /* We cannot determine the ctor. */
3191 if (!ctor)
3192 return NULL_TREE;
3193 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3194 tree_to_uhwi (unit_size)
3195 * BITS_PER_UNIT,
3196 base);
3200 /* Fallthru. */
3202 case COMPONENT_REF:
3203 case BIT_FIELD_REF:
3204 case TARGET_MEM_REF:
3205 case MEM_REF:
3206 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3207 ctor = get_base_constructor (base, &offset, valueize);
3209 /* Empty constructor. Always fold to 0. */
3210 if (ctor == error_mark_node)
3211 return build_zero_cst (TREE_TYPE (t));
3212 /* We do not know the precise address. */
3213 if (max_size == -1 || max_size != size)
3214 return NULL_TREE;
3215 /* We cannot determine the ctor. */
3216 if (!ctor)
3217 return NULL_TREE;
3219 /* Out-of-bounds array access. The value is undefined, but don't fold. */
3220 if (offset < 0)
3221 return NULL_TREE;
3223 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
3224 base);
3226 case REALPART_EXPR:
3227 case IMAGPART_EXPR:
3229 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3230 if (c && TREE_CODE (c) == COMPLEX_CST)
3231 return fold_build1_loc (EXPR_LOCATION (t),
3232 TREE_CODE (t), TREE_TYPE (t), c);
3233 break;
3236 default:
3237 break;
3240 return NULL_TREE;
3243 tree
3244 fold_const_aggregate_ref (tree t)
3246 return fold_const_aggregate_ref_1 (t, NULL);
3249 /* Look up the virtual method with index TOKEN in the virtual table V
3250 at OFFSET.
3251 If CAN_REFER is non-NULL, set it to false if the method
3252 is not referable or if the virtual table is ill-formed (such as rewritten
3253 by a non-C++-produced symbol); otherwise just return NULL in that case. */
3255 tree
3256 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
3257 tree v,
3258 unsigned HOST_WIDE_INT offset,
3259 bool *can_refer)
3261 tree vtable = v, init, fn;
3262 unsigned HOST_WIDE_INT size;
3263 unsigned HOST_WIDE_INT elt_size, access_index;
3264 tree domain_type;
3266 if (can_refer)
3267 *can_refer = true;
3269 /* First of all, double-check that we have a virtual table. */
3270 if (TREE_CODE (v) != VAR_DECL
3271 || !DECL_VIRTUAL_P (v))
3273 gcc_assert (in_lto_p);
3274 /* Pass down that we lost track of the target. */
3275 if (can_refer)
3276 *can_refer = false;
3277 return NULL_TREE;
3280 init = ctor_for_folding (v);
3282 /* The virtual tables should always be born with constructors
3283 and we should always assume that they are available for
3284 folding. At the moment we do not stream them in all cases,
3285 but it should never happen that the ctor seems unreachable. */
3286 gcc_assert (init);
3287 if (init == error_mark_node)
3289 gcc_assert (in_lto_p);
3290 /* Pass down that we lost track of the target. */
3291 if (can_refer)
3292 *can_refer = false;
3293 return NULL_TREE;
3295 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3296 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
3297 offset *= BITS_PER_UNIT;
3298 offset += token * size;
3300 /* Look up the value in the constructor, which is assumed to be an array.
3301 This is equivalent to
3302 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
3303 offset, size, NULL);
3304 but in constant time. We expect that the frontend produced a simple
3305 array without indexed initializers. */
3307 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
3308 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
3309 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
3310 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
3312 access_index = offset / BITS_PER_UNIT / elt_size;
3313 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
3315 /* This code assumes that there are no
3316 indexed fields produced by the C++ FE, so we can directly index the array. */
3317 if (access_index < CONSTRUCTOR_NELTS (init))
3319 fn = CONSTRUCTOR_ELT (init, access_index)->value;
3320 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
3321 STRIP_NOPS (fn);
3323 else
3324 fn = NULL;
3326 /* For a type-inconsistent program we may end up looking up a virtual method
3327 in a virtual table that does not contain TOKEN entries. We may overrun
3328 the virtual table and pick up a constant or RTTI info pointer.
3329 In any case the call is undefined. */
3330 if (!fn
3331 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
3332 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
3333 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3334 else
3336 fn = TREE_OPERAND (fn, 0);
3338 /* When the cgraph node is missing and the function is not public, we cannot
3339 devirtualize. This can happen in WHOPR when the actual method
3340 ends up in another partition, because we found the devirtualization
3341 possibility too late. */
3342 if (!can_refer_decl_in_current_unit_p (fn, vtable))
3344 if (can_refer)
3346 *can_refer = false;
3347 return fn;
3349 return NULL_TREE;
3353 /* Make sure we create a cgraph node for functions we'll reference.
3354 They can be non-existent if the reference comes from an entry
3355 of an external vtable for example. */
3356 cgraph_get_create_node (fn);
3358 return fn;
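/* The slot computation above in plain arithmetic: the vtable is treated
   as a flat array of elt_size-byte entries, so TOKEN and the byte OFFSET
   select one slot.  A sketch with BITS_PER_UNIT taken as 8:  */

static unsigned long
vtable_slot_index (unsigned long token, unsigned long offset_bytes,
		   unsigned long elt_size)
{
  unsigned long offset_bits = offset_bytes * 8 + token * (elt_size * 8);
  return offset_bits / 8 / elt_size;   /* == offset_bytes / elt_size + token */
}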
3361 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3362 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3363 KNOWN_BINFO carries the binfo describing the true type of
3364 OBJ_TYPE_REF_OBJECT(REF).
3365 If CAN_REFER is non-NULL, set it to false if the method
3366 is not referable or if the virtual table is ill-formed (such as rewritten
3367 by a non-C++-produced symbol); otherwise just return NULL in that case. */
3369 tree
3370 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
3371 bool *can_refer)
3373 unsigned HOST_WIDE_INT offset;
3374 tree v;
3376 v = BINFO_VTABLE (known_binfo);
3377 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3378 if (!v)
3379 return NULL_TREE;
3381 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
3383 if (can_refer)
3384 *can_refer = false;
3385 return NULL_TREE;
3387 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
3390 /* Return true iff VAL is a gimple expression that is known to be
3391 non-negative. Restricted to floating-point inputs. */
3393 bool
3394 gimple_val_nonnegative_real_p (tree val)
3396 gimple def_stmt;
3398 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3400 /* Use existing logic for non-gimple trees. */
3401 if (tree_expr_nonnegative_p (val))
3402 return true;
3404 if (TREE_CODE (val) != SSA_NAME)
3405 return false;
3407 /* Currently we look only at the immediately defining statement
3408 to make this determination, since recursion on defining
3409 statements of operands can lead to quadratic behavior in the
3410 worst case. This is expected to catch almost all occurrences
3411 in practice. It would be possible to implement limited-depth
3412 recursion if important cases are lost. Alternatively, passes
3413 that need this information (such as the pow/powi lowering code
3414 in the cse_sincos pass) could be revised to provide it through
3415 dataflow propagation. */
3417 def_stmt = SSA_NAME_DEF_STMT (val);
3419 if (is_gimple_assign (def_stmt))
3421 tree op0, op1;
3423 /* See fold-const.c:tree_expr_nonnegative_p for additional
3424 cases that could be handled with recursion. */
3426 switch (gimple_assign_rhs_code (def_stmt))
3428 case ABS_EXPR:
3429 /* Always true for floating-point operands. */
3430 return true;
3432 case MULT_EXPR:
3433 /* True if the two operands are identical (since we are
3434 restricted to floating-point inputs). */
3435 op0 = gimple_assign_rhs1 (def_stmt);
3436 op1 = gimple_assign_rhs2 (def_stmt);
3438 if (op0 == op1
3439 || operand_equal_p (op0, op1, 0))
3440 return true;
3442 default:
3443 return false;
3446 else if (is_gimple_call (def_stmt))
3448 tree fndecl = gimple_call_fndecl (def_stmt);
3449 if (fndecl
3450 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3452 tree arg1;
3454 switch (DECL_FUNCTION_CODE (fndecl))
3456 CASE_FLT_FN (BUILT_IN_ACOS):
3457 CASE_FLT_FN (BUILT_IN_ACOSH):
3458 CASE_FLT_FN (BUILT_IN_CABS):
3459 CASE_FLT_FN (BUILT_IN_COSH):
3460 CASE_FLT_FN (BUILT_IN_ERFC):
3461 CASE_FLT_FN (BUILT_IN_EXP):
3462 CASE_FLT_FN (BUILT_IN_EXP10):
3463 CASE_FLT_FN (BUILT_IN_EXP2):
3464 CASE_FLT_FN (BUILT_IN_FABS):
3465 CASE_FLT_FN (BUILT_IN_FDIM):
3466 CASE_FLT_FN (BUILT_IN_HYPOT):
3467 CASE_FLT_FN (BUILT_IN_POW10):
3468 return true;
3470 CASE_FLT_FN (BUILT_IN_SQRT):
3471 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3472 negative inputs. */
3473 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3474 return true;
3476 break;
3478 CASE_FLT_FN (BUILT_IN_POWI):
3479 /* True if the second argument is an even integer. */
3480 arg1 = gimple_call_arg (def_stmt, 1);
3482 if (TREE_CODE (arg1) == INTEGER_CST
3483 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3484 return true;
3486 break;
3488 CASE_FLT_FN (BUILT_IN_POW):
3489 /* True if the second argument is an even integer-valued
3490 real. */
3491 arg1 = gimple_call_arg (def_stmt, 1);
3493 if (TREE_CODE (arg1) == REAL_CST)
3495 REAL_VALUE_TYPE c;
3496 HOST_WIDE_INT n;
3498 c = TREE_REAL_CST (arg1);
3499 n = real_to_integer (&c);
3501 if ((n & 1) == 0)
3503 REAL_VALUE_TYPE cint;
3504 real_from_integer (&cint, VOIDmode, n, SIGNED);
3505 if (real_identical (&c, &cint))
3506 return true;
3510 break;
3512 default:
3513 return false;
3518 return false;
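/* The BUILT_IN_POW case above: pow (x, n) is known nonnegative whenever
   n is an even integer-valued real.  A stand-alone sketch of that test,
   assuming the exponent fits in a long:  */

static int
pow_exponent_is_even_integer (double n)
{
  long i = (long) n;   /* truncate, like real_to_integer */
  return (double) i == n && (i & 1) == 0;
}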
3521 /* Given a pointer value T, return a simplified version of an
3522 indirection through T, or NULL_TREE if no simplification is
3523 possible. Note that the resulting type may be different from
3524 the type pointed to in the sense that it is still compatible
3525 from the langhooks point of view. */
3527 tree
3528 gimple_fold_indirect_ref (tree t)
3530 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
3531 tree sub = t;
3532 tree subtype;
3534 STRIP_NOPS (sub);
3535 subtype = TREE_TYPE (sub);
3536 if (!POINTER_TYPE_P (subtype))
3537 return NULL_TREE;
3539 if (TREE_CODE (sub) == ADDR_EXPR)
3541 tree op = TREE_OPERAND (sub, 0);
3542 tree optype = TREE_TYPE (op);
3543 /* *&p => p */
3544 if (useless_type_conversion_p (type, optype))
3545 return op;
3547 /* *(foo *)&fooarray => fooarray[0] */
3548 if (TREE_CODE (optype) == ARRAY_TYPE
3549 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
3550 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3552 tree type_domain = TYPE_DOMAIN (optype);
3553 tree min_val = size_zero_node;
3554 if (type_domain && TYPE_MIN_VALUE (type_domain))
3555 min_val = TYPE_MIN_VALUE (type_domain);
3556 if (TREE_CODE (min_val) == INTEGER_CST)
3557 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3559 /* *(foo *)&complexfoo => __real__ complexfoo */
3560 else if (TREE_CODE (optype) == COMPLEX_TYPE
3561 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3562 return fold_build1 (REALPART_EXPR, type, op);
3563 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
3564 else if (TREE_CODE (optype) == VECTOR_TYPE
3565 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3567 tree part_width = TYPE_SIZE (type);
3568 tree index = bitsize_int (0);
3569 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
3573 /* *(p + CST) -> ... */
3574 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
3575 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
3577 tree addr = TREE_OPERAND (sub, 0);
3578 tree off = TREE_OPERAND (sub, 1);
3579 tree addrtype;
3581 STRIP_NOPS (addr);
3582 addrtype = TREE_TYPE (addr);
3584 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
3585 if (TREE_CODE (addr) == ADDR_EXPR
3586 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
3587 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
3588 && tree_fits_uhwi_p (off))
3590 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
3591 tree part_width = TYPE_SIZE (type);
3592 unsigned HOST_WIDE_INT part_widthi
3593 = tree_to_shwi (part_width) / BITS_PER_UNIT;
3594 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
3595 tree index = bitsize_int (indexi);
3596 if (offset / part_widthi
3597 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
3598 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
3599 part_width, index);
3602 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
3603 if (TREE_CODE (addr) == ADDR_EXPR
3604 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
3605 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
3607 tree size = TYPE_SIZE_UNIT (type);
3608 if (tree_int_cst_equal (size, off))
3609 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
3612 /* *(p + CST) -> MEM_REF <p, CST>. */
3613 if (TREE_CODE (addr) != ADDR_EXPR
3614 || DECL_P (TREE_OPERAND (addr, 0)))
3615 return fold_build2 (MEM_REF, type,
3616 addr,
3617 wide_int_to_tree (ptype, off));
3620 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3621 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3622 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
3623 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3625 tree type_domain;
3626 tree min_val = size_zero_node;
3627 tree osub = sub;
3628 sub = gimple_fold_indirect_ref (sub);
3629 if (! sub)
3630 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3631 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3632 if (type_domain && TYPE_MIN_VALUE (type_domain))
3633 min_val = TYPE_MIN_VALUE (type_domain);
3634 if (TREE_CODE (min_val) == INTEGER_CST)
3635 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3638 return NULL_TREE;
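/* A concrete instance of the first shape handled above: at the source
   level, *&x folds back to plain x.  A minimal sketch:  */

static int
deref_of_address (int x)
{
  return *&x;   /* *&p => p */
}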
3641 /* Return true if CODE is an operation that when operating on signed
3642 integer types involves undefined behavior on overflow and the
3643 operation can be expressed with unsigned arithmetic. */
3645 bool
3646 arith_code_with_undefined_signed_overflow (tree_code code)
3648 switch (code)
3650 case PLUS_EXPR:
3651 case MINUS_EXPR:
3652 case MULT_EXPR:
3653 case NEGATE_EXPR:
3654 case POINTER_PLUS_EXPR:
3655 return true;
3656 default:
3657 return false;
3661 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
3662 operation that can be transformed to unsigned arithmetic by converting
3663 its operand, carrying out the operation in the corresponding unsigned
3664 type and converting the result back to the original type.
3666 Returns a sequence of statements that replace STMT and also contain
3667 a modified form of STMT itself. */
3669 gimple_seq
3670 rewrite_to_defined_overflow (gimple stmt)
3672 if (dump_file && (dump_flags & TDF_DETAILS))
3674 fprintf (dump_file, "rewriting stmt with undefined signed "
3675 "overflow ");
3676 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3679 tree lhs = gimple_assign_lhs (stmt);
3680 tree type = unsigned_type_for (TREE_TYPE (lhs));
3681 gimple_seq stmts = NULL;
3682 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
3684 gimple_seq stmts2 = NULL;
3685 gimple_set_op (stmt, i,
3686 force_gimple_operand (fold_convert (type,
3687 gimple_op (stmt, i)),
3688 &stmts2, true, NULL_TREE));
3689 gimple_seq_add_seq (&stmts, stmts2);
3691 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
3692 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
3693 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
3694 gimple_seq_add_stmt (&stmts, stmt);
3695 gimple cvt = gimple_build_assign_with_ops
3696 (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
3697 gimple_seq_add_stmt (&stmts, cvt);
3699 return stmts;
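/* The rewrite above at the C level: signed addition may overflow, which
   is undefined, so the operation is carried out in the corresponding
   unsigned type, where wrap-around is well defined, and the result is
   converted back.  A minimal sketch for int:  */

static int
plus_defined_overflow (int a, int b)
{
  unsigned int ua = (unsigned int) a;
  unsigned int ub = (unsigned int) b;
  /* The final conversion is implementation-defined for out-of-range
     values, but involves no undefined behavior.  */
  return (int) (ua + ub);
}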