Daily bump.
[official-gcc.git] / gcc / gimple-fold.c
blob2527d292aef778639dfe11b961105f2d45250991
/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2014 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "stringpool.h"
27 #include "expr.h"
28 #include "stmt.h"
29 #include "stor-layout.h"
30 #include "flags.h"
31 #include "function.h"
32 #include "dumpfile.h"
33 #include "bitmap.h"
34 #include "basic-block.h"
35 #include "tree-ssa-alias.h"
36 #include "internal-fn.h"
37 #include "gimple-fold.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimple-ssa.h"
44 #include "tree-ssanames.h"
45 #include "tree-into-ssa.h"
46 #include "tree-dfa.h"
47 #include "tree-ssa.h"
48 #include "tree-ssa-propagate.h"
49 #include "target.h"
50 #include "ipa-utils.h"
51 #include "gimple-pretty-print.h"
52 #include "tree-ssa-address.h"
53 #include "langhooks.h"
54 #include "gimplify-me.h"
56 /* Return true when DECL can be referenced from current unit.
57 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
58 We can get declarations that are not possible to reference for various
59 reasons:
61 1) When analyzing C++ virtual tables.
62 C++ virtual tables do have known constructors even
63 when they are keyed to other compilation unit.
64 Those tables can contain pointers to methods and vars
65 in other units. Those methods have both STATIC and EXTERNAL
66 set.
67 2) In WHOPR mode devirtualization might lead to reference
68 to method that was partitioned elsehwere.
69 In this case we have static VAR_DECL or FUNCTION_DECL
70 that has no corresponding callgraph/varpool node
71 declaring the body.
72 3) COMDAT functions referred by external vtables that
73 we devirtualize only during final compilation stage.
74 At this time we already decided that we will not output
75 the function body and thus we can't reference the symbol
76 directly. */
78 static bool
79 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
81 varpool_node *vnode;
82 struct cgraph_node *node;
83 symtab_node *snode;
85 if (DECL_ABSTRACT (decl))
86 return false;
88 /* We are concerned only about static/external vars and functions. */
89 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
90 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
91 return true;
93 /* Static objects can be referred only if they was not optimized out yet. */
94 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
96 snode = symtab_get_node (decl);
97 if (!snode)
98 return false;
99 node = dyn_cast <cgraph_node> (snode);
100 return !node || !node->global.inlined_to;
103 /* We will later output the initializer, so we can refer to it.
104 So we are concerned only when DECL comes from initializer of
105 external var. */
106 if (!from_decl
107 || TREE_CODE (from_decl) != VAR_DECL
108 || !DECL_EXTERNAL (from_decl)
109 || (flag_ltrans
110 && symtab_get_node (from_decl)->in_other_partition))
111 return true;
112 /* We are folding reference from external vtable. The vtable may reffer
113 to a symbol keyed to other compilation unit. The other compilation
114 unit may be in separate DSO and the symbol may be hidden. */
115 if (DECL_VISIBILITY_SPECIFIED (decl)
116 && DECL_EXTERNAL (decl)
117 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
118 && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
119 return false;
120 /* When function is public, we always can introduce new reference.
121 Exception are the COMDAT functions where introducing a direct
122 reference imply need to include function body in the curren tunit. */
123 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
124 return true;
125 /* We are not at ltrans stage; so don't worry about WHOPR.
126 Also when still gimplifying all referred comdat functions will be
127 produced.
129 As observed in PR20991 for already optimized out comdat virtual functions
130 it may be tempting to not necessarily give up because the copy will be
131 output elsewhere when corresponding vtable is output.
132 This is however not possible - ABI specify that COMDATs are output in
133 units where they are used and when the other unit was compiled with LTO
134 it is possible that vtable was kept public while the function itself
135 was privatized. */
136 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
137 return true;
139 /* OK we are seeing either COMDAT or static variable. In this case we must
140 check that the definition is still around so we can refer it. */
141 if (TREE_CODE (decl) == FUNCTION_DECL)
143 node = cgraph_get_node (decl);
144 /* Check that we still have function body and that we didn't took
145 the decision to eliminate offline copy of the function yet.
146 The second is important when devirtualization happens during final
147 compilation stage when making a new reference no longer makes callee
148 to be compiled. */
149 if (!node || !node->definition
150 || DECL_EXTERNAL (decl) || node->global.inlined_to)
152 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
153 return false;
156 else if (TREE_CODE (decl) == VAR_DECL)
158 vnode = varpool_get_node (decl);
159 if (!vnode || !vnode->definition)
161 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
162 return false;
165 return true;
168 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
169 acceptable form for is_gimple_min_invariant.
170 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
172 tree
173 canonicalize_constructor_val (tree cval, tree from_decl)
175 tree orig_cval = cval;
176 STRIP_NOPS (cval);
177 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
178 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
180 tree ptr = TREE_OPERAND (cval, 0);
181 if (is_gimple_min_invariant (ptr))
182 cval = build1_loc (EXPR_LOCATION (cval),
183 ADDR_EXPR, TREE_TYPE (ptr),
184 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
185 ptr,
186 fold_convert (ptr_type_node,
187 TREE_OPERAND (cval, 1))));
189 if (TREE_CODE (cval) == ADDR_EXPR)
191 tree base = NULL_TREE;
192 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
194 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
195 if (base)
196 TREE_OPERAND (cval, 0) = base;
198 else
199 base = get_base_address (TREE_OPERAND (cval, 0));
200 if (!base)
201 return NULL_TREE;
203 if ((TREE_CODE (base) == VAR_DECL
204 || TREE_CODE (base) == FUNCTION_DECL)
205 && !can_refer_decl_in_current_unit_p (base, from_decl))
206 return NULL_TREE;
207 if (TREE_CODE (base) == VAR_DECL)
208 TREE_ADDRESSABLE (base) = 1;
209 else if (TREE_CODE (base) == FUNCTION_DECL)
211 /* Make sure we create a cgraph node for functions we'll reference.
212 They can be non-existent if the reference comes from an entry
213 of an external vtable for example. */
214 cgraph_get_create_node (base);
216 /* Fixup types in global initializers. */
217 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
218 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
220 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
221 cval = fold_convert (TREE_TYPE (orig_cval), cval);
222 return cval;
224 if (TREE_OVERFLOW_P (cval))
225 return drop_tree_overflow (cval);
226 return orig_cval;
229 /* If SYM is a constant variable with known value, return the value.
230 NULL_TREE is returned otherwise. */
232 tree
233 get_symbol_constant_value (tree sym)
235 tree val = ctor_for_folding (sym);
236 if (val != error_mark_node)
238 if (val)
240 val = canonicalize_constructor_val (unshare_expr (val), sym);
241 if (val && is_gimple_min_invariant (val))
242 return val;
243 else
244 return NULL_TREE;
246 /* Variables declared 'const' without an initializer
247 have zero as the initializer if they may not be
248 overridden at link or run time. */
249 if (!val
250 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
251 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
252 return build_zero_cst (TREE_TYPE (sym));
255 return NULL_TREE;
260 /* Subroutine of fold_stmt. We perform several simplifications of the
261 memory reference tree EXPR and make sure to re-gimplify them properly
262 after propagation of constant addresses. IS_LHS is true if the
263 reference is supposed to be an lvalue. */
265 static tree
266 maybe_fold_reference (tree expr, bool is_lhs)
268 tree *t = &expr;
269 tree result;
271 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
272 || TREE_CODE (expr) == REALPART_EXPR
273 || TREE_CODE (expr) == IMAGPART_EXPR)
274 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
275 return fold_unary_loc (EXPR_LOCATION (expr),
276 TREE_CODE (expr),
277 TREE_TYPE (expr),
278 TREE_OPERAND (expr, 0));
279 else if (TREE_CODE (expr) == BIT_FIELD_REF
280 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
281 return fold_ternary_loc (EXPR_LOCATION (expr),
282 TREE_CODE (expr),
283 TREE_TYPE (expr),
284 TREE_OPERAND (expr, 0),
285 TREE_OPERAND (expr, 1),
286 TREE_OPERAND (expr, 2));
288 while (handled_component_p (*t))
289 t = &TREE_OPERAND (*t, 0);
291 /* Canonicalize MEM_REFs invariant address operand. Do this first
292 to avoid feeding non-canonical MEM_REFs elsewhere. */
293 if (TREE_CODE (*t) == MEM_REF
294 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
296 bool volatile_p = TREE_THIS_VOLATILE (*t);
297 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
298 TREE_OPERAND (*t, 0),
299 TREE_OPERAND (*t, 1));
300 if (tem)
302 TREE_THIS_VOLATILE (tem) = volatile_p;
303 *t = tem;
304 tem = maybe_fold_reference (expr, is_lhs);
305 if (tem)
306 return tem;
307 return expr;
311 if (!is_lhs
312 && (result = fold_const_aggregate_ref (expr))
313 && is_gimple_min_invariant (result))
314 return result;
316 /* Fold back MEM_REFs to reference trees. */
317 if (TREE_CODE (*t) == MEM_REF
318 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
319 && integer_zerop (TREE_OPERAND (*t, 1))
320 && (TREE_THIS_VOLATILE (*t)
321 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
322 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
323 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
324 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
325 /* We have to look out here to not drop a required conversion
326 from the rhs to the lhs if is_lhs, but we don't have the
327 rhs here to verify that. Thus require strict type
328 compatibility. */
329 && types_compatible_p (TREE_TYPE (*t),
330 TREE_TYPE (TREE_OPERAND
331 (TREE_OPERAND (*t, 0), 0))))
333 tree tem;
334 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
335 tem = maybe_fold_reference (expr, is_lhs);
336 if (tem)
337 return tem;
338 return expr;
340 else if (TREE_CODE (*t) == TARGET_MEM_REF)
342 tree tem = maybe_fold_tmr (*t);
343 if (tem)
345 *t = tem;
346 tem = maybe_fold_reference (expr, is_lhs);
347 if (tem)
348 return tem;
349 return expr;
353 return NULL_TREE;
357 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
358 replacement rhs for the statement or NULL_TREE if no simplification
359 could be made. It is assumed that the operands have been previously
360 folded. */
362 static tree
363 fold_gimple_assign (gimple_stmt_iterator *si)
365 gimple stmt = gsi_stmt (*si);
366 enum tree_code subcode = gimple_assign_rhs_code (stmt);
367 location_t loc = gimple_location (stmt);
369 tree result = NULL_TREE;
371 switch (get_gimple_rhs_class (subcode))
373 case GIMPLE_SINGLE_RHS:
375 tree rhs = gimple_assign_rhs1 (stmt);
377 if (REFERENCE_CLASS_P (rhs))
378 return maybe_fold_reference (rhs, false);
380 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
382 tree val = OBJ_TYPE_REF_EXPR (rhs);
383 if (is_gimple_min_invariant (val))
384 return val;
385 else if (flag_devirtualize && virtual_method_call_p (val))
387 bool final;
388 vec <cgraph_node *>targets
389 = possible_polymorphic_call_targets (val, &final);
390 if (final && targets.length () <= 1)
392 tree fndecl;
393 if (targets.length () == 1)
394 fndecl = targets[0]->decl;
395 else
396 fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
397 val = fold_convert (TREE_TYPE (val), fndecl);
398 STRIP_USELESS_TYPE_CONVERSION (val);
399 return val;
404 else if (TREE_CODE (rhs) == ADDR_EXPR)
406 tree ref = TREE_OPERAND (rhs, 0);
407 tree tem = maybe_fold_reference (ref, true);
408 if (tem
409 && TREE_CODE (tem) == MEM_REF
410 && integer_zerop (TREE_OPERAND (tem, 1)))
411 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
412 else if (tem)
413 result = fold_convert (TREE_TYPE (rhs),
414 build_fold_addr_expr_loc (loc, tem));
415 else if (TREE_CODE (ref) == MEM_REF
416 && integer_zerop (TREE_OPERAND (ref, 1)))
417 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
420 else if (TREE_CODE (rhs) == CONSTRUCTOR
421 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
422 && (CONSTRUCTOR_NELTS (rhs)
423 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
425 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
426 unsigned i;
427 tree val;
429 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
430 if (TREE_CODE (val) != INTEGER_CST
431 && TREE_CODE (val) != REAL_CST
432 && TREE_CODE (val) != FIXED_CST)
433 return NULL_TREE;
435 return build_vector_from_ctor (TREE_TYPE (rhs),
436 CONSTRUCTOR_ELTS (rhs));
439 else if (DECL_P (rhs))
440 return get_symbol_constant_value (rhs);
442 /* If we couldn't fold the RHS, hand over to the generic
443 fold routines. */
444 if (result == NULL_TREE)
445 result = fold (rhs);
447 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
448 that may have been added by fold, and "useless" type
449 conversions that might now be apparent due to propagation. */
450 STRIP_USELESS_TYPE_CONVERSION (result);
452 if (result != rhs && valid_gimple_rhs_p (result))
453 return result;
455 return NULL_TREE;
457 break;
459 case GIMPLE_UNARY_RHS:
461 tree rhs = gimple_assign_rhs1 (stmt);
463 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
464 if (result)
466 /* If the operation was a conversion do _not_ mark a
467 resulting constant with TREE_OVERFLOW if the original
468 constant was not. These conversions have implementation
469 defined behavior and retaining the TREE_OVERFLOW flag
470 here would confuse later passes such as VRP. */
471 if (CONVERT_EXPR_CODE_P (subcode)
472 && TREE_CODE (result) == INTEGER_CST
473 && TREE_CODE (rhs) == INTEGER_CST)
474 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
476 STRIP_USELESS_TYPE_CONVERSION (result);
477 if (valid_gimple_rhs_p (result))
478 return result;
481 break;
483 case GIMPLE_BINARY_RHS:
484 /* Try to canonicalize for boolean-typed X the comparisons
485 X == 0, X == 1, X != 0, and X != 1. */
486 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
487 || gimple_assign_rhs_code (stmt) == NE_EXPR)
489 tree lhs = gimple_assign_lhs (stmt);
490 tree op1 = gimple_assign_rhs1 (stmt);
491 tree op2 = gimple_assign_rhs2 (stmt);
492 tree type = TREE_TYPE (op1);
494 /* Check whether the comparison operands are of the same boolean
495 type as the result type is.
496 Check that second operand is an integer-constant with value
497 one or zero. */
498 if (TREE_CODE (op2) == INTEGER_CST
499 && (integer_zerop (op2) || integer_onep (op2))
500 && useless_type_conversion_p (TREE_TYPE (lhs), type))
502 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
503 bool is_logical_not = false;
505 /* X == 0 and X != 1 is a logical-not.of X
506 X == 1 and X != 0 is X */
507 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
508 || (cmp_code == NE_EXPR && integer_onep (op2)))
509 is_logical_not = true;
511 if (is_logical_not == false)
512 result = op1;
513 /* Only for one-bit precision typed X the transformation
514 !X -> ~X is valied. */
515 else if (TYPE_PRECISION (type) == 1)
516 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
517 type, op1);
518 /* Otherwise we use !X -> X ^ 1. */
519 else
520 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
521 type, op1, build_int_cst (type, 1));
526 if (!result)
527 result = fold_binary_loc (loc, subcode,
528 TREE_TYPE (gimple_assign_lhs (stmt)),
529 gimple_assign_rhs1 (stmt),
530 gimple_assign_rhs2 (stmt));
532 if (result)
534 STRIP_USELESS_TYPE_CONVERSION (result);
535 if (valid_gimple_rhs_p (result))
536 return result;
538 break;
540 case GIMPLE_TERNARY_RHS:
541 /* Try to fold a conditional expression. */
542 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
544 tree op0 = gimple_assign_rhs1 (stmt);
545 tree tem;
546 bool set = false;
547 location_t cond_loc = gimple_location (stmt);
549 if (COMPARISON_CLASS_P (op0))
551 fold_defer_overflow_warnings ();
552 tem = fold_binary_loc (cond_loc,
553 TREE_CODE (op0), TREE_TYPE (op0),
554 TREE_OPERAND (op0, 0),
555 TREE_OPERAND (op0, 1));
556 /* This is actually a conditional expression, not a GIMPLE
557 conditional statement, however, the valid_gimple_rhs_p
558 test still applies. */
559 set = (tem && is_gimple_condexpr (tem)
560 && valid_gimple_rhs_p (tem));
561 fold_undefer_overflow_warnings (set, stmt, 0);
563 else if (is_gimple_min_invariant (op0))
565 tem = op0;
566 set = true;
568 else
569 return NULL_TREE;
571 if (set)
572 result = fold_build3_loc (cond_loc, COND_EXPR,
573 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
574 gimple_assign_rhs2 (stmt),
575 gimple_assign_rhs3 (stmt));
578 if (!result)
579 result = fold_ternary_loc (loc, subcode,
580 TREE_TYPE (gimple_assign_lhs (stmt)),
581 gimple_assign_rhs1 (stmt),
582 gimple_assign_rhs2 (stmt),
583 gimple_assign_rhs3 (stmt));
585 if (result)
587 STRIP_USELESS_TYPE_CONVERSION (result);
588 if (valid_gimple_rhs_p (result))
589 return result;
591 break;
593 case GIMPLE_INVALID_RHS:
594 gcc_unreachable ();
597 return NULL_TREE;
600 /* Attempt to fold a conditional statement. Return true if any changes were
601 made. We only attempt to fold the condition expression, and do not perform
602 any transformation that would require alteration of the cfg. It is
603 assumed that the operands have been previously folded. */
605 static bool
606 fold_gimple_cond (gimple stmt)
608 tree result = fold_binary_loc (gimple_location (stmt),
609 gimple_cond_code (stmt),
610 boolean_type_node,
611 gimple_cond_lhs (stmt),
612 gimple_cond_rhs (stmt));
614 if (result)
616 STRIP_USELESS_TYPE_CONVERSION (result);
617 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
619 gimple_cond_set_condition_from_tree (stmt, result);
620 return true;
624 return false;
627 /* Convert EXPR into a GIMPLE value suitable for substitution on the
628 RHS of an assignment. Insert the necessary statements before
629 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
630 is replaced. If the call is expected to produces a result, then it
631 is replaced by an assignment of the new RHS to the result variable.
632 If the result is to be ignored, then the call is replaced by a
633 GIMPLE_NOP. A proper VDEF chain is retained by making the first
634 VUSE and the last VDEF of the whole sequence be the same as the replaced
635 statement and using new SSA names for stores in between. */
637 void
638 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
640 tree lhs;
641 gimple stmt, new_stmt;
642 gimple_stmt_iterator i;
643 gimple_seq stmts = NULL;
644 gimple laststore;
645 tree reaching_vuse;
647 stmt = gsi_stmt (*si_p);
649 gcc_assert (is_gimple_call (stmt));
651 push_gimplify_context (gimple_in_ssa_p (cfun));
653 lhs = gimple_call_lhs (stmt);
654 if (lhs == NULL_TREE)
656 gimplify_and_add (expr, &stmts);
657 /* We can end up with folding a memcpy of an empty class assignment
658 which gets optimized away by C++ gimplification. */
659 if (gimple_seq_empty_p (stmts))
661 pop_gimplify_context (NULL);
662 if (gimple_in_ssa_p (cfun))
664 unlink_stmt_vdef (stmt);
665 release_defs (stmt);
667 gsi_replace (si_p, gimple_build_nop (), true);
668 return;
671 else
673 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
674 new_stmt = gimple_build_assign (lhs, tmp);
675 i = gsi_last (stmts);
676 gsi_insert_after_without_update (&i, new_stmt,
677 GSI_CONTINUE_LINKING);
680 pop_gimplify_context (NULL);
682 if (gimple_has_location (stmt))
683 annotate_all_with_location (stmts, gimple_location (stmt));
685 /* First iterate over the replacement statements backward, assigning
686 virtual operands to their defining statements. */
687 laststore = NULL;
688 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
690 new_stmt = gsi_stmt (i);
691 if ((gimple_assign_single_p (new_stmt)
692 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
693 || (is_gimple_call (new_stmt)
694 && (gimple_call_flags (new_stmt)
695 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
697 tree vdef;
698 if (!laststore)
699 vdef = gimple_vdef (stmt);
700 else
701 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
702 gimple_set_vdef (new_stmt, vdef);
703 if (vdef && TREE_CODE (vdef) == SSA_NAME)
704 SSA_NAME_DEF_STMT (vdef) = new_stmt;
705 laststore = new_stmt;
709 /* Second iterate over the statements forward, assigning virtual
710 operands to their uses. */
711 reaching_vuse = gimple_vuse (stmt);
712 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
714 new_stmt = gsi_stmt (i);
715 /* If the new statement possibly has a VUSE, update it with exact SSA
716 name we know will reach this one. */
717 if (gimple_has_mem_ops (new_stmt))
718 gimple_set_vuse (new_stmt, reaching_vuse);
719 gimple_set_modified (new_stmt, true);
720 if (gimple_vdef (new_stmt))
721 reaching_vuse = gimple_vdef (new_stmt);
724 /* If the new sequence does not do a store release the virtual
725 definition of the original statement. */
726 if (reaching_vuse
727 && reaching_vuse == gimple_vuse (stmt))
729 tree vdef = gimple_vdef (stmt);
730 if (vdef
731 && TREE_CODE (vdef) == SSA_NAME)
733 unlink_stmt_vdef (stmt);
734 release_ssa_name (vdef);
738 /* Finally replace the original statement with the sequence. */
739 gsi_replace_with_seq (si_p, stmts, false);
742 /* Return the string length, maximum string length or maximum value of
743 ARG in LENGTH.
744 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
745 is not NULL and, for TYPE == 0, its value is not equal to the length
746 we determine or if we are unable to determine the length or value,
747 return false. VISITED is a bitmap of visited variables.
748 TYPE is 0 if string length should be returned, 1 for maximum string
749 length and 2 for maximum value ARG can have. */
751 static bool
752 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
754 tree var, val;
755 gimple def_stmt;
757 if (TREE_CODE (arg) != SSA_NAME)
759 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
760 if (TREE_CODE (arg) == ADDR_EXPR
761 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
762 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
764 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
765 if (TREE_CODE (aop0) == INDIRECT_REF
766 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
767 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
768 length, visited, type);
771 if (type == 2)
773 val = arg;
774 if (TREE_CODE (val) != INTEGER_CST
775 || tree_int_cst_sgn (val) < 0)
776 return false;
778 else
779 val = c_strlen (arg, 1);
780 if (!val)
781 return false;
783 if (*length)
785 if (type > 0)
787 if (TREE_CODE (*length) != INTEGER_CST
788 || TREE_CODE (val) != INTEGER_CST)
789 return false;
791 if (tree_int_cst_lt (*length, val))
792 *length = val;
793 return true;
795 else if (simple_cst_equal (val, *length) != 1)
796 return false;
799 *length = val;
800 return true;
803 /* If ARG is registered for SSA update we cannot look at its defining
804 statement. */
805 if (name_registered_for_update_p (arg))
806 return false;
808 /* If we were already here, break the infinite cycle. */
809 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
810 return true;
812 var = arg;
813 def_stmt = SSA_NAME_DEF_STMT (var);
815 switch (gimple_code (def_stmt))
817 case GIMPLE_ASSIGN:
818 /* The RHS of the statement defining VAR must either have a
819 constant length or come from another SSA_NAME with a constant
820 length. */
821 if (gimple_assign_single_p (def_stmt)
822 || gimple_assign_unary_nop_p (def_stmt))
824 tree rhs = gimple_assign_rhs1 (def_stmt);
825 return get_maxval_strlen (rhs, length, visited, type);
827 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
829 tree op2 = gimple_assign_rhs2 (def_stmt);
830 tree op3 = gimple_assign_rhs3 (def_stmt);
831 return get_maxval_strlen (op2, length, visited, type)
832 && get_maxval_strlen (op3, length, visited, type);
834 return false;
836 case GIMPLE_PHI:
838 /* All the arguments of the PHI node must have the same constant
839 length. */
840 unsigned i;
842 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
844 tree arg = gimple_phi_arg (def_stmt, i)->def;
846 /* If this PHI has itself as an argument, we cannot
847 determine the string length of this argument. However,
848 if we can find a constant string length for the other
849 PHI args then we can still be sure that this is a
850 constant string length. So be optimistic and just
851 continue with the next argument. */
852 if (arg == gimple_phi_result (def_stmt))
853 continue;
855 if (!get_maxval_strlen (arg, length, visited, type))
856 return false;
859 return true;
861 default:
862 return false;
867 /* Fold builtin call in statement STMT. Returns a simplified tree.
868 We may return a non-constant expression, including another call
869 to a different function and with different arguments, e.g.,
870 substituting memcpy for strcpy when the string length is known.
871 Note that some builtins expand into inline code that may not
872 be valid in GIMPLE. Callers must take care. */
874 tree
875 gimple_fold_builtin (gimple stmt)
877 tree result, val[3];
878 tree callee, a;
879 int arg_idx, type;
880 bitmap visited;
881 bool ignore;
882 int nargs;
883 location_t loc = gimple_location (stmt);
885 ignore = (gimple_call_lhs (stmt) == NULL);
887 /* First try the generic builtin folder. If that succeeds, return the
888 result directly. */
889 result = fold_call_stmt (stmt, ignore);
890 if (result)
892 if (ignore)
893 STRIP_NOPS (result);
894 else
895 result = fold_convert (gimple_call_return_type (stmt), result);
896 return result;
899 /* Ignore MD builtins. */
900 callee = gimple_call_fndecl (stmt);
901 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
902 return NULL_TREE;
904 /* Give up for always_inline inline builtins until they are
905 inlined. */
906 if (avoid_folding_inline_builtin (callee))
907 return NULL_TREE;
909 /* If the builtin could not be folded, and it has no argument list,
910 we're done. */
911 nargs = gimple_call_num_args (stmt);
912 if (nargs == 0)
913 return NULL_TREE;
915 /* Limit the work only for builtins we know how to simplify. */
916 switch (DECL_FUNCTION_CODE (callee))
918 case BUILT_IN_STRLEN:
919 case BUILT_IN_FPUTS:
920 case BUILT_IN_FPUTS_UNLOCKED:
921 arg_idx = 0;
922 type = 0;
923 break;
924 case BUILT_IN_STRCPY:
925 case BUILT_IN_STRNCPY:
926 case BUILT_IN_STRCAT:
927 arg_idx = 1;
928 type = 0;
929 break;
930 case BUILT_IN_MEMCPY_CHK:
931 case BUILT_IN_MEMPCPY_CHK:
932 case BUILT_IN_MEMMOVE_CHK:
933 case BUILT_IN_MEMSET_CHK:
934 case BUILT_IN_STRNCPY_CHK:
935 case BUILT_IN_STPNCPY_CHK:
936 arg_idx = 2;
937 type = 2;
938 break;
939 case BUILT_IN_STRCPY_CHK:
940 case BUILT_IN_STPCPY_CHK:
941 arg_idx = 1;
942 type = 1;
943 break;
944 case BUILT_IN_SNPRINTF_CHK:
945 case BUILT_IN_VSNPRINTF_CHK:
946 arg_idx = 1;
947 type = 2;
948 break;
949 default:
950 return NULL_TREE;
953 if (arg_idx >= nargs)
954 return NULL_TREE;
956 /* Try to use the dataflow information gathered by the CCP process. */
957 visited = BITMAP_ALLOC (NULL);
958 bitmap_clear (visited);
960 memset (val, 0, sizeof (val));
961 a = gimple_call_arg (stmt, arg_idx);
962 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
963 val[arg_idx] = NULL_TREE;
965 BITMAP_FREE (visited);
967 result = NULL_TREE;
968 switch (DECL_FUNCTION_CODE (callee))
970 case BUILT_IN_STRLEN:
971 if (val[0] && nargs == 1)
973 tree new_val =
974 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
976 /* If the result is not a valid gimple value, or not a cast
977 of a valid gimple value, then we cannot use the result. */
978 if (is_gimple_val (new_val)
979 || (CONVERT_EXPR_P (new_val)
980 && is_gimple_val (TREE_OPERAND (new_val, 0))))
981 return new_val;
983 break;
985 case BUILT_IN_STRCPY:
986 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
987 result = fold_builtin_strcpy (loc, callee,
988 gimple_call_arg (stmt, 0),
989 gimple_call_arg (stmt, 1),
990 val[1]);
991 break;
993 case BUILT_IN_STRNCPY:
994 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
995 result = fold_builtin_strncpy (loc, callee,
996 gimple_call_arg (stmt, 0),
997 gimple_call_arg (stmt, 1),
998 gimple_call_arg (stmt, 2),
999 val[1]);
1000 break;
1002 case BUILT_IN_STRCAT:
1003 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1004 result = fold_builtin_strcat (loc, gimple_call_arg (stmt, 0),
1005 gimple_call_arg (stmt, 1),
1006 val[1]);
1007 break;
1009 case BUILT_IN_FPUTS:
1010 if (nargs == 2)
1011 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1012 gimple_call_arg (stmt, 1),
1013 ignore, false, val[0]);
1014 break;
1016 case BUILT_IN_FPUTS_UNLOCKED:
1017 if (nargs == 2)
1018 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1019 gimple_call_arg (stmt, 1),
1020 ignore, true, val[0]);
1021 break;
1023 case BUILT_IN_MEMCPY_CHK:
1024 case BUILT_IN_MEMPCPY_CHK:
1025 case BUILT_IN_MEMMOVE_CHK:
1026 case BUILT_IN_MEMSET_CHK:
1027 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1028 result = fold_builtin_memory_chk (loc, callee,
1029 gimple_call_arg (stmt, 0),
1030 gimple_call_arg (stmt, 1),
1031 gimple_call_arg (stmt, 2),
1032 gimple_call_arg (stmt, 3),
1033 val[2], ignore,
1034 DECL_FUNCTION_CODE (callee));
1035 break;
1037 case BUILT_IN_STRCPY_CHK:
1038 case BUILT_IN_STPCPY_CHK:
1039 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1040 result = fold_builtin_stxcpy_chk (loc, callee,
1041 gimple_call_arg (stmt, 0),
1042 gimple_call_arg (stmt, 1),
1043 gimple_call_arg (stmt, 2),
1044 val[1], ignore,
1045 DECL_FUNCTION_CODE (callee));
1046 break;
1048 case BUILT_IN_STRNCPY_CHK:
1049 case BUILT_IN_STPNCPY_CHK:
1050 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1051 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
1052 gimple_call_arg (stmt, 1),
1053 gimple_call_arg (stmt, 2),
1054 gimple_call_arg (stmt, 3),
1055 val[2], ignore,
1056 DECL_FUNCTION_CODE (callee));
1057 break;
1059 case BUILT_IN_SNPRINTF_CHK:
1060 case BUILT_IN_VSNPRINTF_CHK:
1061 if (val[1] && is_gimple_val (val[1]))
1062 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1063 DECL_FUNCTION_CODE (callee));
1064 break;
1066 default:
1067 gcc_unreachable ();
1070 if (result && ignore)
1071 result = fold_ignored_result (result);
1072 return result;
1076 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1077    The statement may be replaced by another statement, e.g., if the call
1078    simplifies to a constant value. Return true if any changes were made.
1079    It is assumed that the operands have been previously folded. */
1081 static bool
1082 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1084   gimple stmt = gsi_stmt (*gsi);
1085   tree callee;
1086   bool changed = false;
1087   unsigned i;
1089   /* Fold *& in call arguments. */
1090   for (i = 0; i < gimple_call_num_args (stmt); ++i)
1091     if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1093 	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1094 	if (tmp)
1096 	    gimple_call_set_arg (stmt, i, tmp);
1097 	    changed = true;
1101   /* Check for virtual calls that became direct calls.  If the OBJ_TYPE_REF
     expression wraps a known function decl, strip the wrapper and call the
     decl directly; dump a diagnostic first if the devirtualized target is
     not among the possible polymorphic call targets.  */
1102   callee = gimple_call_fn (stmt);
1103   if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1105       if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1107 	  if (dump_file && virtual_method_call_p (callee)
1108 	      && !possible_polymorphic_call_target_p
1109 		    (callee, cgraph_get_node (gimple_call_addr_fndecl
1110                                                  (OBJ_TYPE_REF_EXPR (callee)))))
1112 	      fprintf (dump_file,
1113 		       "Type inheritance inconsistent devirtualization of ");
1114 	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1115 	      fprintf (dump_file, " to ");
1116 	      print_generic_expr (dump_file, callee, TDF_SLIM);
1117 	      fprintf (dump_file, "\n");
1120 	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1121 	  changed = true;
1123       else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
1125 	  bool final;
1126 	  vec <cgraph_node *>targets
1127 	    = possible_polymorphic_call_targets (callee, &final);
1128 	  if (final && targets.length () <= 1)
1130 	      tree lhs = gimple_call_lhs (stmt);
1131 	      if (targets.length () == 1)
1133 		  gimple_call_set_fndecl (stmt, targets[0]->decl);
1134 		  changed = true;
1135 		  /* If the call becomes noreturn, remove the lhs.  */
1136 		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
1138 		      if (TREE_CODE (lhs) == SSA_NAME)
1140 			  tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
1141 			  tree def = get_or_create_ssa_default_def (cfun, var);
1142 			  gimple new_stmt = gimple_build_assign (lhs, def);
1143 			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1145 		      gimple_call_set_lhs (stmt, NULL_TREE);
     /* No possible targets at all: the call can never execute, so replace
        it with a trap via __builtin_unreachable.  */
1148 	      else
1150 		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
1151 		  gimple new_stmt = gimple_build_call (fndecl, 0);
1152 		  gimple_set_location (new_stmt, gimple_location (stmt));
1153 		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
1155 		      tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
1156 		      tree def = get_or_create_ssa_default_def (cfun, var);
1158 		      /* To satisfy condition for
1159 			 cgraph_update_edges_for_call_stmt_node,
1160 			 we need to preserve GIMPLE_CALL statement
1161 			 at position of GSI iterator.  */
1162 		      update_call_from_tree (gsi, def);
1163 		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
1165 		  else
1166 		    gsi_replace (gsi, new_stmt, true);
1167 		  return true;
     /* Everything below may replace the statement or grow its operand list,
        which is not allowed when folding in place.  */
1173   if (inplace)
1174     return changed;
1176   /* Check for builtins that CCP can handle using information not
1177      available in the generic fold routines.  */
1178   if (gimple_call_builtin_p (stmt))
1180       tree result = gimple_fold_builtin (stmt);
1181       if (result)
1183 	  if (!update_call_from_tree (gsi, result))
1184 	    gimplify_and_update_call_from_tree (gsi, result);
1185 	  changed = true;
1187   else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
1188     changed |= targetm.gimple_fold_builtin (gsi);
     /* Fold the internal functions handled here: BUILTIN_EXPECT and the
        UBSAN arithmetic-overflow checks whose operands allow an identity
        simplification.  */
1190   else if (gimple_call_internal_p (stmt))
1192       enum tree_code subcode = ERROR_MARK;
1193       tree result = NULL_TREE;
1194       switch (gimple_call_internal_fn (stmt))
1196 	case IFN_BUILTIN_EXPECT:
1197 	  result = fold_builtin_expect (gimple_location (stmt),
1198 					gimple_call_arg (stmt, 0),
1199 					gimple_call_arg (stmt, 1),
1200 					gimple_call_arg (stmt, 2));
1201 	  break;
1202 	case IFN_UBSAN_CHECK_ADD:
1203 	  subcode = PLUS_EXPR;
1204 	  break;
1205 	case IFN_UBSAN_CHECK_SUB:
1206 	  subcode = MINUS_EXPR;
1207 	  break;
1208 	case IFN_UBSAN_CHECK_MUL:
1209 	  subcode = MULT_EXPR;
1210 	  break;
1211 	default:
1212 	  break;
1214       if (subcode != ERROR_MARK)
1216 	  tree arg0 = gimple_call_arg (stmt, 0);
1217 	  tree arg1 = gimple_call_arg (stmt, 1);
1218 	  /* x = y + 0; x = y - 0; x = y * 0; */
1219 	  if (integer_zerop (arg1))
1220 	    result = subcode == MULT_EXPR
1221 		     ? build_zero_cst (TREE_TYPE (arg0))
1222 		     : arg0;
1223 	  /* x = 0 + y; x = 0 * y; */
1224 	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1225 	    result = subcode == MULT_EXPR
1226 		     ? build_zero_cst (TREE_TYPE (arg0))
1227 		     : arg1;
1228 	  /* x = y - y; */
1229 	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1230 	    result = build_zero_cst (TREE_TYPE (arg0));
1231 	  /* x = y * 1; x = 1 * y; */
1232 	  else if (subcode == MULT_EXPR)
1234 	      if (integer_onep (arg1))
1235 		result = arg0;
1236 	      else if (integer_onep (arg0))
1237 		result = arg1;
1240       if (result)
1242 	  if (!update_call_from_tree (gsi, result))
1243 	    gimplify_and_update_call_from_tree (gsi, result);
1244 	  changed = true;
1248   return changed;
1251 /* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
1252    distinguishes both cases.  When INPLACE, the statement may only be
     modified in place, never replaced or grown.  Returns true iff any
     change was made.  */
1254 static bool
1255 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1257   bool changed = false;
1258   gimple stmt = gsi_stmt (*gsi);
1259   unsigned i;
1261   /* Fold the main computation performed by the statement.  */
1262   switch (gimple_code (stmt))
1264     case GIMPLE_ASSIGN:
1266 	unsigned old_num_ops = gimple_num_ops (stmt);
1267 	enum tree_code subcode = gimple_assign_rhs_code (stmt);
1268 	tree lhs = gimple_assign_lhs (stmt);
1269 	tree new_rhs;
1270 	/* First canonicalize operand order.  This avoids building new
1271 	   trees if this is the only thing fold would later do.  */
1272 	if ((commutative_tree_code (subcode)
1273 	     || commutative_ternary_tree_code (subcode))
1274 	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1275 				     gimple_assign_rhs2 (stmt), false))
1277 	    tree tem = gimple_assign_rhs1 (stmt);
1278 	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1279 	    gimple_assign_set_rhs2 (stmt, tem);
1280 	    changed = true;
1282 	new_rhs = fold_gimple_assign (gsi);
1283 	if (new_rhs
1284 	    && !useless_type_conversion_p (TREE_TYPE (lhs),
1285 					   TREE_TYPE (new_rhs)))
1286 	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
     /* In-place folding only accepts a new RHS that needs fewer operands
        than the current one, since the statement cannot grow.  */
1287 	if (new_rhs
1288 	    && (!inplace
1289 		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1291 	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1292 	    changed = true;
1294 	break;
1297     case GIMPLE_COND:
1298       changed |= fold_gimple_cond (stmt);
1299       break;
1301     case GIMPLE_CALL:
1302       changed |= gimple_fold_call (gsi, inplace);
1303       break;
1305     case GIMPLE_ASM:
1306       /* Fold *& in asm operands.  */
1308 	size_t noutputs;
1309 	const char **oconstraints;
1310 	const char *constraint;
1311 	bool allows_mem, allows_reg;
1313 	noutputs = gimple_asm_noutputs (stmt);
1314 	oconstraints = XALLOCAVEC (const char *, noutputs);
1316 	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1318 	    tree link = gimple_asm_output_op (stmt, i);
1319 	    tree op = TREE_VALUE (link);
1320 	    oconstraints[i]
1321 	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1322 	    if (REFERENCE_CLASS_P (op)
1323 		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
1325 		TREE_VALUE (link) = op;
1326 		changed = true;
1329 	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1331 	    tree link = gimple_asm_input_op (stmt, i);
1332 	    tree op = TREE_VALUE (link);
1333 	    constraint
1334 	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
     /* Whether an input may be folded as an lvalue depends on its
        constraint: only fold that way when memory is allowed but a
        register is not.  */
1335 	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1336 				    oconstraints, &allows_mem, &allows_reg);
1337 	    if (REFERENCE_CLASS_P (op)
1338 		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1339 		   != NULL_TREE)
1341 		TREE_VALUE (link) = op;
1342 		changed = true;
1346       break;
1348     case GIMPLE_DEBUG:
1349       if (gimple_debug_bind_p (stmt))
1351 	  tree val = gimple_debug_bind_get_value (stmt);
1352 	  if (val
1353 	      && REFERENCE_CLASS_P (val))
1355 	      tree tem = maybe_fold_reference (val, false);
1356 	      if (tem)
1358 		  gimple_debug_bind_set_value (stmt, tem);
1359 		  changed = true;
1362 	  else if (val
1363 		   && TREE_CODE (val) == ADDR_EXPR)
1365 	      tree ref = TREE_OPERAND (val, 0);
1366 	      tree tem = maybe_fold_reference (ref, false);
1367 	      if (tem)
1369 		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1370 		  gimple_debug_bind_set_value (stmt, tem);
1371 		  changed = true;
1375       break;
1377     default:;
     /* The folds above may have replaced the statement; re-fetch it from
        the iterator before touching the LHS.  */
1380   stmt = gsi_stmt (*gsi);
1382   /* Fold *& on the lhs.  */
1383   if (gimple_has_lhs (stmt))
1385       tree lhs = gimple_get_lhs (stmt);
1386       if (lhs && REFERENCE_CLASS_P (lhs))
1388 	  tree new_lhs = maybe_fold_reference (lhs, true);
1389 	  if (new_lhs)
1391 	      gimple_set_lhs (stmt, new_lhs);
1392 	      changed = true;
1397   return changed;
1400 /* Fold the statement pointed to by GSI.  In some cases, this function may
1401    replace the whole statement with a new one.  Returns true iff folding
1402    makes any changes.
1403    The statement pointed to by GSI should be in valid gimple form but may
1404    be in unfolded state as resulting from for example constant propagation
1405    which can produce *&x = 0.  */
1407 bool
1408 fold_stmt (gimple_stmt_iterator *gsi)
     /* Delegate to the shared worker with INPLACE false, permitting
        statement replacement.  */
1410   return fold_stmt_1 (gsi, false);
1413 /* Perform the minimal folding on statement *GSI.  Only operations like
1414    *&x created by constant propagation are handled.  The statement cannot
1415    be replaced with a new one.  Return true if the statement was
1416    changed, false otherwise.
1417    The statement *GSI should be in valid gimple form but may
1418    be in unfolded state as resulting from for example constant propagation
1419    which can produce *&x = 0.  */
1421 bool
1422 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1424   gimple stmt = gsi_stmt (*gsi);
1425   bool changed = fold_stmt_1 (gsi, true);
     /* With INPLACE true the worker must leave the very same statement at
        the iterator; verify that contract.  */
1426   gcc_assert (gsi_stmt (*gsi) == stmt);
1427   return changed;
1430 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1431    if EXPR is null or we don't know how.
1432    If non-null, the result always has boolean type.  */
1434 static tree
1435 canonicalize_bool (tree expr, bool invert)
1437   if (!expr)
1438     return NULL_TREE;
1439   else if (invert)
     /* Inverted canonicalization: constants flip, an SSA name becomes
        (name == 0), and a comparison gets its code inverted.  */
1441       if (integer_nonzerop (expr))
1442 	return boolean_false_node;
1443       else if (integer_zerop (expr))
1444 	return boolean_true_node;
1445       else if (TREE_CODE (expr) == SSA_NAME)
1446 	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1447 			    build_int_cst (TREE_TYPE (expr), 0));
1448       else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1449 	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1450 			    boolean_type_node,
1451 			    TREE_OPERAND (expr, 0),
1452 			    TREE_OPERAND (expr, 1));
1453       else
1454 	return NULL_TREE;
1456   else
     /* Non-inverted canonicalization: a value that already has boolean
        type passes through unchanged; otherwise constants map to the
        boolean nodes, an SSA name becomes (name != 0), and a comparison
        is rebuilt with boolean type.  */
1458       if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1459 	return expr;
1460       if (integer_nonzerop (expr))
1461 	return boolean_true_node;
1462       else if (integer_zerop (expr))
1463 	return boolean_false_node;
1464       else if (TREE_CODE (expr) == SSA_NAME)
1465 	return fold_build2 (NE_EXPR, boolean_type_node, expr,
1466 			    build_int_cst (TREE_TYPE (expr), 0));
1467       else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1468 	return fold_build2 (TREE_CODE (expr),
1469 			    boolean_type_node,
1470 			    TREE_OPERAND (expr, 0),
1471 			    TREE_OPERAND (expr, 1));
1472       else
1473 	return NULL_TREE;
1477 /* Check to see if a boolean expression EXPR is logically equivalent to the
1478    comparison (OP1 CODE OP2).  Check for various identities involving
1479    SSA_NAMEs.  */
1481 static bool
1482 same_bool_comparison_p (const_tree expr, enum tree_code code,
1483 			const_tree op1, const_tree op2)
1485   gimple s;
1487   /* The obvious case.  */
1488   if (TREE_CODE (expr) == code
1489       && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1490       && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1491     return true;
1493   /* Check for comparing (name, name != 0) and the case where expr
1494      is an SSA_NAME with a definition matching the comparison.  */
1495   if (TREE_CODE (expr) == SSA_NAME
1496       && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1498       if (operand_equal_p (expr, op1, 0))
1499 	return ((code == NE_EXPR && integer_zerop (op2))
1500 		|| (code == EQ_EXPR && integer_nonzerop (op2)));
1501       s = SSA_NAME_DEF_STMT (expr);
1502       if (is_gimple_assign (s)
1503 	  && gimple_assign_rhs_code (s) == code
1504 	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1505 	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1506 	return true;
1509   /* If op1 is of the form (name != 0) or (name == 0), and the definition
1510      of name is a comparison, recurse.  */
1511   if (TREE_CODE (op1) == SSA_NAME
1512       && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1514       s = SSA_NAME_DEF_STMT (op1);
1515       if (is_gimple_assign (s)
1516 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1518 	  enum tree_code c = gimple_assign_rhs_code (s);
     /* (name != 0) / (name == 1) preserves the sense of the inner
        comparison; recurse with the same code.  */
1519 	  if ((c == NE_EXPR && integer_zerop (op2))
1520 	      || (c == EQ_EXPR && integer_nonzerop (op2)))
1521 	    return same_bool_comparison_p (expr, c,
1522 					   gimple_assign_rhs1 (s),
1523 					   gimple_assign_rhs2 (s));
     /* (name == 0) / (name != 1) negates it; recurse with the inverted
        comparison code.  */
1524 	  if ((c == EQ_EXPR && integer_zerop (op2))
1525 	      || (c == NE_EXPR && integer_nonzerop (op2)))
1526 	    return same_bool_comparison_p (expr,
1527 					   invert_tree_comparison (c, false),
1528 					   gimple_assign_rhs1 (s),
1529 					   gimple_assign_rhs2 (s));
1532   return false;
1535 /* Check to see if two boolean expressions OP1 and OP2 are logically
1536    equivalent.  Returns false when equivalence cannot be proven, not
     necessarily when the operands differ.  */
1538 static bool
1539 same_bool_result_p (const_tree op1, const_tree op2)
1541   /* Simple cases first.  */
1542   if (operand_equal_p (op1, op2, 0))
1543     return true;
1545   /* Check the cases where at least one of the operands is a comparison.
1546      These are a bit smarter than operand_equal_p in that they apply some
1547      identifies on SSA_NAMEs.  Try both orientations since
     same_bool_comparison_p is asymmetric.  */
1548   if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1549       && same_bool_comparison_p (op1, TREE_CODE (op2),
1550 				 TREE_OPERAND (op2, 0),
1551 				 TREE_OPERAND (op2, 1)))
1552     return true;
1553   if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1554       && same_bool_comparison_p (op2, TREE_CODE (op1),
1555 				 TREE_OPERAND (op1, 0),
1556 				 TREE_OPERAND (op1, 1)))
1557     return true;
1559   /* Default case.  */
1560   return false;
1563 /* Forward declarations for some mutually recursive functions.
     The and_* and or_* families mirror each other and recurse into one
     another through DeMorgan rewrites.  */
1565 static tree
1566 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1567 		   enum tree_code code2, tree op2a, tree op2b);
1568 static tree
1569 and_var_with_comparison (tree var, bool invert,
1570 			 enum tree_code code2, tree op2a, tree op2b);
1571 static tree
1572 and_var_with_comparison_1 (gimple stmt,
1573 			   enum tree_code code2, tree op2a, tree op2b);
1574 static tree
1575 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1576 		  enum tree_code code2, tree op2a, tree op2b);
1577 static tree
1578 or_var_with_comparison (tree var, bool invert,
1579 			enum tree_code code2, tree op2a, tree op2b);
1580 static tree
1581 or_var_with_comparison_1 (gimple stmt,
1582 			  enum tree_code code2, tree op2a, tree op2b);
1584 /* Helper function for and_comparisons_1: try to simplify the AND of the
1585    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1586    If INVERT is true, invert the value of the VAR before doing the AND.
1587    Return NULL_EXPR if we can't simplify this to a single expression.  */
1589 static tree
1590 and_var_with_comparison (tree var, bool invert,
1591 			 enum tree_code code2, tree op2a, tree op2b)
1593   tree t;
1594   gimple stmt = SSA_NAME_DEF_STMT (var);
1596   /* We can only deal with variables whose definitions are assignments.  */
1597   if (!is_gimple_assign (stmt))
1598     return NULL_TREE;
1600   /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1601      !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1602      Then we only have to consider the simpler non-inverted cases.  */
1603   if (invert)
1604     t = or_var_with_comparison_1 (stmt,
1605 				  invert_tree_comparison (code2, false),
1606 				  op2a, op2b);
1607   else
1608     t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
     /* canonicalize_bool undoes the DeMorgan inversion and guarantees the
        result, if any, has boolean type.  */
1609   return canonicalize_bool (t, invert);
1612 /* Try to simplify the AND of the ssa variable defined by the assignment
1613    STMT with the comparison specified by (OP2A CODE2 OP2B).
1614    Return NULL_EXPR if we can't simplify this to a single expression.  */
1616 static tree
1617 and_var_with_comparison_1 (gimple stmt,
1618 			   enum tree_code code2, tree op2a, tree op2b)
1620   tree var = gimple_assign_lhs (stmt);
1621   tree true_test_var = NULL_TREE;
1622   tree false_test_var = NULL_TREE;
1623   enum tree_code innercode = gimple_assign_rhs_code (stmt);
1625   /* Check for identities like (var AND (var == 0)) => false.  */
1626   if (TREE_CODE (op2a) == SSA_NAME
1627       && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
     /* Record whether the comparison tests op2a for truth
        (true_test_var) or falsity (false_test_var); used again when
        reassociating below.  */
1629       if ((code2 == NE_EXPR && integer_zerop (op2b))
1630 	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1632 	  true_test_var = op2a;
1633 	  if (var == true_test_var)
1634 	    return var;
1636       else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1637 	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1639 	  false_test_var = op2a;
1640 	  if (var == false_test_var)
1641 	    return boolean_false_node;
1645   /* If the definition is a comparison, recurse on it.  */
1646   if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1648       tree t = and_comparisons_1 (innercode,
1649 			          gimple_assign_rhs1 (stmt),
1650 				  gimple_assign_rhs2 (stmt),
1651 				  code2,
1652 				  op2a,
1653 				  op2b);
1654       if (t)
1655 	return t;
1658   /* If the definition is an AND or OR expression, we may be able to
1659      simplify by reassociating.  */
1660   if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1661       && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1663       tree inner1 = gimple_assign_rhs1 (stmt);
1664       tree inner2 = gimple_assign_rhs2 (stmt);
1665       gimple s;
1666       tree t;
1667       tree partial = NULL_TREE;
1668       bool is_and = (innercode == BIT_AND_EXPR);
1670       /* Check for boolean identities that don't require recursive examination
1671 	 of inner1/inner2:
1672 	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1673 	 inner1 AND (inner1 OR inner2) => inner1
1674 	 !inner1 AND (inner1 AND inner2) => false
1675 	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1676 	 Likewise for similar cases involving inner2.  */
1677       if (inner1 == true_test_var)
1678 	return (is_and ? var : inner1);
1679       else if (inner2 == true_test_var)
1680 	return (is_and ? var : inner2);
1681       else if (inner1 == false_test_var)
1682 	return (is_and
1683 		? boolean_false_node
1684 		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
1685       else if (inner2 == false_test_var)
1686 	return (is_and
1687 		? boolean_false_node
1688 		: and_var_with_comparison (inner1, false, code2, op2a, op2b));
1690       /* Next, redistribute/reassociate the AND across the inner tests.
1691 	 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1692       if (TREE_CODE (inner1) == SSA_NAME
1693 	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1694 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1695 	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1696 					      gimple_assign_rhs1 (s),
1697 					      gimple_assign_rhs2 (s),
1698 					      code2, op2a, op2b)))
1700 	  /* Handle the AND case, where we are reassociating:
1701 	     (inner1 AND inner2) AND (op2a code2 op2b)
1702 	     => (t AND inner2)
1703 	     If the partial result t is a constant, we win.  Otherwise
1704 	     continue on to try reassociating with the other inner test.  */
1705 	  if (is_and)
1707 	      if (integer_onep (t))
1708 		return inner2;
1709 	      else if (integer_zerop (t))
1710 		return boolean_false_node;
1713 	  /* Handle the OR case, where we are redistributing:
1714 	     (inner1 OR inner2) AND (op2a code2 op2b)
1715 	     => (t OR (inner2 AND (op2a code2 op2b)))  */
1716 	  else if (integer_onep (t))
1717 	    return boolean_true_node;
1719 	  /* Save partial result for later.  */
1720 	  partial = t;
1723       /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1724       if (TREE_CODE (inner2) == SSA_NAME
1725 	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1726 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1727 	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1728 					      gimple_assign_rhs1 (s),
1729 					      gimple_assign_rhs2 (s),
1730 					      code2, op2a, op2b)))
1732 	  /* Handle the AND case, where we are reassociating:
1733 	     (inner1 AND inner2) AND (op2a code2 op2b)
1734 	     => (inner1 AND t)  */
1735 	  if (is_and)
1737 	      if (integer_onep (t))
1738 		return inner1;
1739 	      else if (integer_zerop (t))
1740 		return boolean_false_node;
1741 	      /* If both are the same, we can apply the identity
1742 		 (x AND x) == x.  */
1743 	      else if (partial && same_bool_result_p (t, partial))
1744 		return t;
1747 	  /* Handle the OR case. where we are redistributing:
1748 	     (inner1 OR inner2) AND (op2a code2 op2b)
1749 	     => (t OR (inner1 AND (op2a code2 op2b)))
1750 	     => (t OR partial)  */
1751 	  else
1753 	      if (integer_onep (t))
1754 		return boolean_true_node;
1755 	      else if (partial)
1757 		  /* We already got a simplification for the other
1758 		     operand to the redistributed OR expression.  The
1759 		     interesting case is when at least one is false.
1760 		     Or, if both are the same, we can apply the identity
1761 		     (x OR x) == x.  */
1762 		  if (integer_zerop (partial))
1763 		    return t;
1764 		  else if (integer_zerop (t))
1765 		    return partial;
1766 		  else if (same_bool_result_p (t, partial))
1767 		    return t;
     /* No simplification found.  */
1772   return NULL_TREE;
1775 /* Try to simplify the AND of two comparisons defined by
1776    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1777    If this can be done without constructing an intermediate value,
1778    return the resulting tree; otherwise NULL_TREE is returned.
1779    This function is deliberately asymmetric as it recurses on SSA_DEFs
1780    in the first comparison but not the second.  */
1782 static tree
1783 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1784 		   enum tree_code code2, tree op2a, tree op2b)
1786   tree truth_type = truth_type_for (TREE_TYPE (op1a));
1788   /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
1789   if (operand_equal_p (op1a, op2a, 0)
1790       && operand_equal_p (op1b, op2b, 0))
1792       /* Result will be either NULL_TREE, or a combined comparison.  */
1793       tree t = combine_comparisons (UNKNOWN_LOCATION,
1794 				    TRUTH_ANDIF_EXPR, code1, code2,
1795 				    truth_type, op1a, op1b);
1796       if (t)
1797 	return t;
1800   /* Likewise the swapped case of the above.  */
1801   if (operand_equal_p (op1a, op2b, 0)
1802       && operand_equal_p (op1b, op2a, 0))
1804       /* Result will be either NULL_TREE, or a combined comparison.  */
1805       tree t = combine_comparisons (UNKNOWN_LOCATION,
1806 				    TRUTH_ANDIF_EXPR, code1,
1807 				    swap_tree_comparison (code2),
1808 				    truth_type, op1a, op1b);
1809       if (t)
1810 	return t;
1813   /* If both comparisons are of the same value against constants, we might
1814      be able to merge them.  */
1815   if (operand_equal_p (op1a, op2a, 0)
1816       && TREE_CODE (op1b) == INTEGER_CST
1817       && TREE_CODE (op2b) == INTEGER_CST)
     /* CMP orders the two constant bounds: negative when op1b < op2b,
        zero when equal, positive when op1b > op2b.  */
1819       int cmp = tree_int_cst_compare (op1b, op2b);
1821       /* If we have (op1a == op1b), we should either be able to
1822 	 return that or FALSE, depending on whether the constant op1b
1823 	 also satisfies the other comparison against op2b.  */
1824       if (code1 == EQ_EXPR)
1826 	  bool done = true;
1827 	  bool val;
1828 	  switch (code2)
1830 	    case EQ_EXPR: val = (cmp == 0); break;
1831 	    case NE_EXPR: val = (cmp != 0); break;
1832 	    case LT_EXPR: val = (cmp < 0); break;
1833 	    case GT_EXPR: val = (cmp > 0); break;
1834 	    case LE_EXPR: val = (cmp <= 0); break;
1835 	    case GE_EXPR: val = (cmp >= 0); break;
1836 	    default: done = false;
1838 	  if (done)
1840 	      if (val)
1841 		return fold_build2 (code1, boolean_type_node, op1a, op1b);
1842 	      else
1843 		return boolean_false_node;
1846       /* Likewise if the second comparison is an == comparison.  */
1847       else if (code2 == EQ_EXPR)
1849 	  bool done = true;
1850 	  bool val;
1851 	  switch (code1)
1853 	    case EQ_EXPR: val = (cmp == 0); break;
1854 	    case NE_EXPR: val = (cmp != 0); break;
1855 	    case LT_EXPR: val = (cmp > 0); break;
1856 	    case GT_EXPR: val = (cmp < 0); break;
1857 	    case LE_EXPR: val = (cmp >= 0); break;
1858 	    case GE_EXPR: val = (cmp <= 0); break;
1859 	    default: done = false;
1861 	  if (done)
1863 	      if (val)
1864 		return fold_build2 (code2, boolean_type_node, op2a, op2b);
1865 	      else
1866 		return boolean_false_node;
1870       /* Same business with inequality tests.  */
1871       else if (code1 == NE_EXPR)
1873 	  bool val;
1874 	  switch (code2)
1876 	    case EQ_EXPR: val = (cmp != 0); break;
1877 	    case NE_EXPR: val = (cmp == 0); break;
1878 	    case LT_EXPR: val = (cmp >= 0); break;
1879 	    case GT_EXPR: val = (cmp <= 0); break;
1880 	    case LE_EXPR: val = (cmp > 0); break;
1881 	    case GE_EXPR: val = (cmp < 0); break;
1882 	    default:
1883 	      val = false;
     /* VAL here means the second comparison implies the first, so the
        AND reduces to the second comparison alone.  */
1885 	  if (val)
1886 	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
1888       else if (code2 == NE_EXPR)
1890 	  bool val;
1891 	  switch (code1)
1893 	    case EQ_EXPR: val = (cmp == 0); break;
1894 	    case NE_EXPR: val = (cmp != 0); break;
1895 	    case LT_EXPR: val = (cmp <= 0); break;
1896 	    case GT_EXPR: val = (cmp >= 0); break;
1897 	    case LE_EXPR: val = (cmp < 0); break;
1898 	    case GE_EXPR: val = (cmp > 0); break;
1899 	    default:
1900 	      val = false;
1902 	  if (val)
1903 	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
1906       /* Chose the more restrictive of two < or <= comparisons.  */
1907       else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1908 	       && (code2 == LT_EXPR || code2 == LE_EXPR))
1910 	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1911 	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
1912 	  else
1913 	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
1916       /* Likewise chose the more restrictive of two > or >= comparisons.  */
1917       else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1918 	       && (code2 == GT_EXPR || code2 == GE_EXPR))
1920 	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1921 	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
1922 	  else
1923 	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
1926       /* Check for singleton ranges.  */
1927       else if (cmp == 0
1928 	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
1929 		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
1930 	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1932       /* Check for disjoint ranges. */
1933       else if (cmp <= 0
1934 	       && (code1 == LT_EXPR || code1 == LE_EXPR)
1935 	       && (code2 == GT_EXPR || code2 == GE_EXPR))
1936 	return boolean_false_node;
1937       else if (cmp >= 0
1938 	       && (code1 == GT_EXPR || code1 == GE_EXPR)
1939 	       && (code2 == LT_EXPR || code2 == LE_EXPR))
1940 	return boolean_false_node;
1943   /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1944      NAME's definition is a truth value.  See if there are any simplifications
1945      that can be done against the NAME's definition.  */
1946   if (TREE_CODE (op1a) == SSA_NAME
1947       && (code1 == NE_EXPR || code1 == EQ_EXPR)
1948       && (integer_zerop (op1b) || integer_onep (op1b)))
     /* INVERT is true when the first comparison tests NAME for falsity,
        i.e. (NAME == 0) or (NAME != 1).  */
1950       bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1951 		     || (code1 == NE_EXPR && integer_onep (op1b)));
1952       gimple stmt = SSA_NAME_DEF_STMT (op1a);
1953       switch (gimple_code (stmt))
1955 	case GIMPLE_ASSIGN:
1956 	  /* Try to simplify by copy-propagating the definition.  */
1957 	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1959 	case GIMPLE_PHI:
1960 	  /* If every argument to the PHI produces the same result when
1961 	     ANDed with the second comparison, we win.
1962 	     Do not do this unless the type is bool since we need a bool
1963 	     result here anyway.  */
1964 	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1966 	      tree result = NULL_TREE;
1967 	      unsigned i;
1968 	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
1970 		  tree arg = gimple_phi_arg_def (stmt, i);
1972 		  /* If this PHI has itself as an argument, ignore it.
1973 		     If all the other args produce the same result,
1974 		     we're still OK.  */
1975 		  if (arg == gimple_phi_result (stmt))
1976 		    continue;
1977 		  else if (TREE_CODE (arg) == INTEGER_CST)
1979 		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1981 			  if (!result)
1982 			    result = boolean_false_node;
1983 			  else if (!integer_zerop (result))
1984 			    return NULL_TREE;
1986 		      else if (!result)
1987 			result = fold_build2 (code2, boolean_type_node,
1988 					      op2a, op2b);
1989 		      else if (!same_bool_comparison_p (result,
1990 							code2, op2a, op2b))
1991 			return NULL_TREE;
1993 		  else if (TREE_CODE (arg) == SSA_NAME
1994 			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
1996 		      tree temp;
1997 		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1998 		      /* In simple cases we can look through PHI nodes,
1999 			 but we have to be careful with loops.
2000 			 See PR49073.  */
2001 		      if (! dom_info_available_p (CDI_DOMINATORS)
2002 			  || gimple_bb (def_stmt) == gimple_bb (stmt)
2003 			  || dominated_by_p (CDI_DOMINATORS,
2004 					     gimple_bb (def_stmt),
2005 					     gimple_bb (stmt)))
2006 			return NULL_TREE;
2007 		      temp = and_var_with_comparison (arg, invert, code2,
2008 						      op2a, op2b);
2009 		      if (!temp)
2010 			return NULL_TREE;
2011 		      else if (!result)
2012 			result = temp;
2013 		      else if (!same_bool_result_p (result, temp))
2014 			return NULL_TREE;
2016 		  else
2017 		    return NULL_TREE;
2019 	      return result;
2022 	default:
2023 	  break;
2026   return NULL_TREE;
2029 /* Try to simplify the AND of two comparisons, specified by
2030    (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2031    If this can be simplified to a single expression (without requiring
2032    introducing more SSA variables to hold intermediate values),
2033    return the resulting tree.  Otherwise return NULL_TREE.
2034    If the result expression is non-null, it has boolean type.  */
2036 tree
2037 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2038 			    enum tree_code code2, tree op2a, tree op2b)
     /* and_comparisons_1 only recurses on SSA defs of its first
        comparison, so try both argument orders.  */
2040   tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2041   if (t)
2042     return t;
2043   else
2044     return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2047 /* Helper function for or_comparisons_1: try to simplify the OR of the
2048    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2049    If INVERT is true, invert the value of VAR before doing the OR.
2050    Return NULL_EXPR if we can't simplify this to a single expression.  */
2052 static tree
2053 or_var_with_comparison (tree var, bool invert,
2054 			enum tree_code code2, tree op2a, tree op2b)
2056   tree t;
2057   gimple stmt = SSA_NAME_DEF_STMT (var);
2059   /* We can only deal with variables whose definitions are assignments.  */
2060   if (!is_gimple_assign (stmt))
2061     return NULL_TREE;
2063   /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2064      !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2065      Then we only have to consider the simpler non-inverted cases.  */
2066   if (invert)
2067     t = and_var_with_comparison_1 (stmt,
2068 				   invert_tree_comparison (code2, false),
2069 				   op2a, op2b);
2070   else
2071     t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
     /* canonicalize_bool undoes the DeMorgan inversion and guarantees the
        result, if any, has boolean type.  */
2072   return canonicalize_bool (t, invert);
2075 /* Try to simplify the OR of the ssa variable defined by the assignment
2076 STMT with the comparison specified by (OP2A CODE2 OP2B).
2077 Return NULL_EXPR if we can't simplify this to a single expression. */
/* NOTE(review): blank and brace-only lines appear to have been dropped
   when this chunk was extracted; the leading numbers on each line are the
   original upstream line numbers.  Block structure must be confirmed
   against the upstream source before editing the code itself.  */
2079 static tree
2080 or_var_with_comparison_1 (gimple stmt,
2081 enum tree_code code2, tree op2a, tree op2b)
/* VAR is the SSA name defined by STMT; we are simplifying
   (VAR OR (OP2A CODE2 OP2B)).  */
2083 tree var = gimple_assign_lhs (stmt);
2084 tree true_test_var = NULL_TREE;
2085 tree false_test_var = NULL_TREE;
2086 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2088 /* Check for identities like (var OR (var != 0)) => true . */
2089 if (TREE_CODE (op2a) == SSA_NAME
2090 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) or (op2a == 1) tests that the boolean op2a is true.  */
2092 if ((code2 == NE_EXPR && integer_zerop (op2b))
2093 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2095 true_test_var = op2a;
2096 if (var == true_test_var)
2097 return var;
/* (op2a == 0) or (op2a != 1) tests that the boolean op2a is false.  */
2099 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2100 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2102 false_test_var = op2a;
2103 if (var == false_test_var)
2104 return boolean_true_node;
2108 /* If the definition is a comparison, recurse on it. */
2109 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2111 tree t = or_comparisons_1 (innercode,
2112 gimple_assign_rhs1 (stmt),
2113 gimple_assign_rhs2 (stmt),
2114 code2,
2115 op2a,
2116 op2b);
2117 if (t)
2118 return t;
2121 /* If the definition is an AND or OR expression, we may be able to
2122 simplify by reassociating. */
2123 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2124 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2126 tree inner1 = gimple_assign_rhs1 (stmt);
2127 tree inner2 = gimple_assign_rhs2 (stmt);
2128 gimple s;
2129 tree t;
/* PARTIAL caches the simplification of the first inner operand, if
   any, so it can be combined with the second one below.  */
2130 tree partial = NULL_TREE;
2131 bool is_or = (innercode == BIT_IOR_EXPR);
2133 /* Check for boolean identities that don't require recursive examination
2134 of inner1/inner2:
2135 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2136 inner1 OR (inner1 AND inner2) => inner1
2137 !inner1 OR (inner1 OR inner2) => true
2138 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2140 if (inner1 == true_test_var)
2141 return (is_or ? var : inner1);
2142 else if (inner2 == true_test_var)
2143 return (is_or ? var : inner2);
2144 else if (inner1 == false_test_var)
2145 return (is_or
2146 ? boolean_true_node
2147 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2148 else if (inner2 == false_test_var)
2149 return (is_or
2150 ? boolean_true_node
2151 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2153 /* Next, redistribute/reassociate the OR across the inner tests.
2154 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2155 if (TREE_CODE (inner1) == SSA_NAME
2156 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2157 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2158 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2159 gimple_assign_rhs1 (s),
2160 gimple_assign_rhs2 (s),
2161 code2, op2a, op2b)))
2163 /* Handle the OR case, where we are reassociating:
2164 (inner1 OR inner2) OR (op2a code2 op2b)
2165 => (t OR inner2)
2166 If the partial result t is a constant, we win. Otherwise
2167 continue on to try reassociating with the other inner test. */
2168 if (is_or)
2170 if (integer_onep (t))
2171 return boolean_true_node;
2172 else if (integer_zerop (t))
2173 return inner2;
2176 /* Handle the AND case, where we are redistributing:
2177 (inner1 AND inner2) OR (op2a code2 op2b)
2178 => (t AND (inner2 OR (op2a code op2b))) */
2179 else if (integer_zerop (t))
2180 return boolean_false_node;
2182 /* Save partial result for later. */
2183 partial = t;
2186 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2187 if (TREE_CODE (inner2) == SSA_NAME
2188 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2189 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2190 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2191 gimple_assign_rhs1 (s),
2192 gimple_assign_rhs2 (s),
2193 code2, op2a, op2b)))
2195 /* Handle the OR case, where we are reassociating:
2196 (inner1 OR inner2) OR (op2a code2 op2b)
2197 => (inner1 OR t)
2198 => (t OR partial) */
2199 if (is_or)
2201 if (integer_zerop (t))
2202 return inner1;
2203 else if (integer_onep (t))
2204 return boolean_true_node;
2205 /* If both are the same, we can apply the identity
2206 (x OR x) == x. */
2207 else if (partial && same_bool_result_p (t, partial))
2208 return t;
2211 /* Handle the AND case, where we are redistributing:
2212 (inner1 AND inner2) OR (op2a code2 op2b)
2213 => (t AND (inner1 OR (op2a code2 op2b)))
2214 => (t AND partial) */
2215 else
2217 if (integer_zerop (t))
2218 return boolean_false_node;
2219 else if (partial)
2221 /* We already got a simplification for the other
2222 operand to the redistributed AND expression. The
2223 interesting case is when at least one is true.
2224 Or, if both are the same, we can apply the identity
2225 (x AND x) == x. */
2226 if (integer_onep (partial))
2227 return t;
2228 else if (integer_onep (t))
2229 return partial;
2230 else if (same_bool_result_p (t, partial))
2231 return t;
/* No single-expression simplification was found.  */
2236 return NULL_TREE;
2239 /* Try to simplify the OR of two comparisons defined by
2240 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2241 If this can be done without constructing an intermediate value,
2242 return the resulting tree; otherwise NULL_TREE is returned.
2243 This function is deliberately asymmetric as it recurses on SSA_DEFs
2244 in the first comparison but not the second. */
2246 static tree
2247 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2248 enum tree_code code2, tree op2a, tree op2b)
2250 tree truth_type = truth_type_for (TREE_TYPE (op1a));
2252 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2253 if (operand_equal_p (op1a, op2a, 0)
2254 && operand_equal_p (op1b, op2b, 0))
2256 /* Result will be either NULL_TREE, or a combined comparison. */
2257 tree t = combine_comparisons (UNKNOWN_LOCATION,
2258 TRUTH_ORIF_EXPR, code1, code2,
2259 truth_type, op1a, op1b);
2260 if (t)
2261 return t;
2264 /* Likewise the swapped case of the above. */
2265 if (operand_equal_p (op1a, op2b, 0)
2266 && operand_equal_p (op1b, op2a, 0))
2268 /* Result will be either NULL_TREE, or a combined comparison. */
2269 tree t = combine_comparisons (UNKNOWN_LOCATION,
2270 TRUTH_ORIF_EXPR, code1,
2271 swap_tree_comparison (code2),
2272 truth_type, op1a, op1b);
2273 if (t)
2274 return t;
2277 /* If both comparisons are of the same value against constants, we might
2278 be able to merge them. */
2279 if (operand_equal_p (op1a, op2a, 0)
2280 && TREE_CODE (op1b) == INTEGER_CST
2281 && TREE_CODE (op2b) == INTEGER_CST)
/* CMP encodes the ordering of the two constants op1b vs op2b and
   drives all the range reasoning below.  */
2283 int cmp = tree_int_cst_compare (op1b, op2b);
2285 /* If we have (op1a != op1b), we should either be able to
2286 return that or TRUE, depending on whether the constant op1b
2287 also satisfies the other comparison against op2b. */
2288 if (code1 == NE_EXPR)
2290 bool done = true;
2291 bool val;
2292 switch (code2)
2294 case EQ_EXPR: val = (cmp == 0); break;
2295 case NE_EXPR: val = (cmp != 0); break;
2296 case LT_EXPR: val = (cmp < 0); break;
2297 case GT_EXPR: val = (cmp > 0); break;
2298 case LE_EXPR: val = (cmp <= 0); break;
2299 case GE_EXPR: val = (cmp >= 0); break;
2300 default: done = false;
2302 if (done)
/* If op1b itself satisfies (op1b CODE2 op2b), the two tests
   together cover every value.  */
2304 if (val)
2305 return boolean_true_node;
2306 else
2307 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2310 /* Likewise if the second comparison is a != comparison. */
2311 else if (code2 == NE_EXPR)
2313 bool done = true;
2314 bool val;
2315 switch (code1)
/* Tests are mirrored here: VAL asks whether op2b satisfies
   (op2b CODE1 op1b), hence the flipped cmp signs.  */
2317 case EQ_EXPR: val = (cmp == 0); break;
2318 case NE_EXPR: val = (cmp != 0); break;
2319 case LT_EXPR: val = (cmp > 0); break;
2320 case GT_EXPR: val = (cmp < 0); break;
2321 case LE_EXPR: val = (cmp >= 0); break;
2322 case GE_EXPR: val = (cmp <= 0); break;
2323 default: done = false;
2325 if (done)
2327 if (val)
2328 return boolean_true_node;
2329 else
2330 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2334 /* See if an equality test is redundant with the other comparison. */
2335 else if (code1 == EQ_EXPR)
2337 bool val;
2338 switch (code2)
2340 case EQ_EXPR: val = (cmp == 0); break;
2341 case NE_EXPR: val = (cmp != 0); break;
2342 case LT_EXPR: val = (cmp < 0); break;
2343 case GT_EXPR: val = (cmp > 0); break;
2344 case LE_EXPR: val = (cmp <= 0); break;
2345 case GE_EXPR: val = (cmp >= 0); break;
2346 default:
2347 val = false;
/* The equality test is subsumed by the second comparison.  */
2349 if (val)
2350 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2352 else if (code2 == EQ_EXPR)
2354 bool val;
2355 switch (code1)
2357 case EQ_EXPR: val = (cmp == 0); break;
2358 case NE_EXPR: val = (cmp != 0); break;
2359 case LT_EXPR: val = (cmp > 0); break;
2360 case GT_EXPR: val = (cmp < 0); break;
2361 case LE_EXPR: val = (cmp >= 0); break;
2362 case GE_EXPR: val = (cmp <= 0); break;
2363 default:
2364 val = false;
2366 if (val)
2367 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2370 /* Choose the less restrictive of two < or <= comparisons. */
2371 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2372 && (code2 == LT_EXPR || code2 == LE_EXPR))
2374 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2375 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2376 else
2377 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2380 /* Likewise choose the less restrictive of two > or >= comparisons. */
2381 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2382 && (code2 == GT_EXPR || code2 == GE_EXPR))
2384 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2385 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2386 else
2387 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2390 /* Check for singleton ranges. */
2391 else if (cmp == 0
2392 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2393 || (code1 == GT_EXPR && code2 == LT_EXPR)))
/* (x < C) OR (x > C) is simply (x != C).  */
2394 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2396 /* Check for less/greater pairs that don't restrict the range at all. */
2397 else if (cmp >= 0
2398 && (code1 == LT_EXPR || code1 == LE_EXPR)
2399 && (code2 == GT_EXPR || code2 == GE_EXPR))
2400 return boolean_true_node;
2401 else if (cmp <= 0
2402 && (code1 == GT_EXPR || code1 == GE_EXPR)
2403 && (code2 == LT_EXPR || code2 == LE_EXPR))
2404 return boolean_true_node;
2407 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2408 NAME's definition is a truth value. See if there are any simplifications
2409 that can be done against the NAME's definition. */
2410 if (TREE_CODE (op1a) == SSA_NAME
2411 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2412 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is true when the first comparison tests that op1a is false.  */
2414 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2415 || (code1 == NE_EXPR && integer_onep (op1b)));
2416 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2417 switch (gimple_code (stmt))
2419 case GIMPLE_ASSIGN:
2420 /* Try to simplify by copy-propagating the definition. */
2421 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2423 case GIMPLE_PHI:
2424 /* If every argument to the PHI produces the same result when
2425 ORed with the second comparison, we win.
2426 Do not do this unless the type is bool since we need a bool
2427 result here anyway. */
2428 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2430 tree result = NULL_TREE;
2431 unsigned i;
2432 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2434 tree arg = gimple_phi_arg_def (stmt, i);
2436 /* If this PHI has itself as an argument, ignore it.
2437 If all the other args produce the same result,
2438 we're still OK. */
2439 if (arg == gimple_phi_result (stmt))
2440 continue;
2441 else if (TREE_CODE (arg) == INTEGER_CST)
2443 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2445 if (!result)
2446 result = boolean_true_node;
2447 else if (!integer_onep (result))
2448 return NULL_TREE;
2450 else if (!result)
2451 result = fold_build2 (code2, boolean_type_node,
2452 op2a, op2b);
2453 else if (!same_bool_comparison_p (result,
2454 code2, op2a, op2b))
2455 return NULL_TREE;
2457 else if (TREE_CODE (arg) == SSA_NAME
2458 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2460 tree temp;
2461 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2462 /* In simple cases we can look through PHI nodes,
2463 but we have to be careful with loops.
2464 See PR49073. */
2465 if (! dom_info_available_p (CDI_DOMINATORS)
2466 || gimple_bb (def_stmt) == gimple_bb (stmt)
2467 || dominated_by_p (CDI_DOMINATORS,
2468 gimple_bb (def_stmt),
2469 gimple_bb (stmt)))
2470 return NULL_TREE;
2471 temp = or_var_with_comparison (arg, invert, code2,
2472 op2a, op2b);
2473 if (!temp)
2474 return NULL_TREE;
2475 else if (!result)
2476 result = temp;
2477 else if (!same_bool_result_p (result, temp))
2478 return NULL_TREE;
2480 else
2481 return NULL_TREE;
2483 return result;
2486 default:
2487 break;
2490 return NULL_TREE;
2493 /* Try to simplify the OR of two comparisons, specified by
2494 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2495 If this can be simplified to a single expression (without requiring
2496 introducing more SSA variables to hold intermediate values),
2497 return the resulting tree. Otherwise return NULL_TREE.
2498 If the result expression is non-null, it has boolean type. */
2500 tree
2501 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2502 enum tree_code code2, tree op2a, tree op2b)
2504 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2505 if (t)
2506 return t;
2507 else
2508 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2512 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2514 Either NULL_TREE, a simplified but non-constant or a constant
2515 is returned.
2517 ??? This should go into a gimple-fold-inline.h file to be eventually
2518 privatized with the single valueize function used in the various TUs
2519 to avoid the indirect function call overhead. */
/* Dispatches on the statement kind: assignments are folded per RHS
   class below; calls handle the UBSAN-check internal functions and
   built-in functions with constant arguments.  */
2521 tree
2522 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2524 location_t loc = gimple_location (stmt);
2525 switch (gimple_code (stmt))
2527 case GIMPLE_ASSIGN:
2529 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2531 switch (get_gimple_rhs_class (subcode))
2533 case GIMPLE_SINGLE_RHS:
2535 tree rhs = gimple_assign_rhs1 (stmt);
2536 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2538 if (TREE_CODE (rhs) == SSA_NAME)
2540 /* If the RHS is an SSA_NAME, return its known constant value,
2541 if any. */
2542 return (*valueize) (rhs);
2544 /* Handle propagating invariant addresses into address
2545 operations. */
2546 else if (TREE_CODE (rhs) == ADDR_EXPR
2547 && !is_gimple_min_invariant (rhs))
2549 HOST_WIDE_INT offset = 0;
2550 tree base;
2551 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2552 &offset,
2553 valueize);
2554 if (base
2555 && (CONSTANT_CLASS_P (base)
2556 || decl_address_invariant_p (base)))
2557 return build_invariant_address (TREE_TYPE (rhs),
2558 base, offset);
/* A fully-populated vector CONSTRUCTOR folds to a VECTOR_CST when
   every valueized element is a constant.  */
2560 else if (TREE_CODE (rhs) == CONSTRUCTOR
2561 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2562 && (CONSTRUCTOR_NELTS (rhs)
2563 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2565 unsigned i;
2566 tree val, *vec;
2568 vec = XALLOCAVEC (tree,
2569 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2570 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2572 val = (*valueize) (val);
2573 if (TREE_CODE (val) == INTEGER_CST
2574 || TREE_CODE (val) == REAL_CST
2575 || TREE_CODE (val) == FIXED_CST)
2576 vec[i] = val;
2577 else
2578 return NULL_TREE;
2581 return build_vector (TREE_TYPE (rhs), vec);
2583 if (subcode == OBJ_TYPE_REF)
2585 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
2586 /* If callee is constant, we can fold away the wrapper. */
2587 if (is_gimple_min_invariant (val))
2588 return val;
2591 if (kind == tcc_reference)
2593 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2594 || TREE_CODE (rhs) == REALPART_EXPR
2595 || TREE_CODE (rhs) == IMAGPART_EXPR)
2596 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2598 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2599 return fold_unary_loc (EXPR_LOCATION (rhs),
2600 TREE_CODE (rhs),
2601 TREE_TYPE (rhs), val);
2603 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2604 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2606 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2607 return fold_ternary_loc (EXPR_LOCATION (rhs),
2608 TREE_CODE (rhs),
2609 TREE_TYPE (rhs), val,
2610 TREE_OPERAND (rhs, 1),
2611 TREE_OPERAND (rhs, 2));
/* For a MEM_REF off a constant address, rewrite the reference so
   the aggregate-folding below can see through it.  */
2613 else if (TREE_CODE (rhs) == MEM_REF
2614 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2616 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2617 if (TREE_CODE (val) == ADDR_EXPR
2618 && is_gimple_min_invariant (val))
2620 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2621 unshare_expr (val),
2622 TREE_OPERAND (rhs, 1));
2623 if (tem)
2624 rhs = tem;
2627 return fold_const_aggregate_ref_1 (rhs, valueize);
2629 else if (kind == tcc_declaration)
2630 return get_symbol_constant_value (rhs);
2631 return rhs;
2634 case GIMPLE_UNARY_RHS:
2636 /* Handle unary operators that can appear in GIMPLE form.
2637 Note that we know the single operand must be a constant,
2638 so this should almost always return a simplified RHS. */
2639 tree lhs = gimple_assign_lhs (stmt);
2640 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2642 /* Conversions are useless for CCP purposes if they are
2643 value-preserving. Thus the restrictions that
2644 useless_type_conversion_p places for restrict qualification
2645 of pointer types should not apply here.
2646 Substitution later will only substitute to allowed places. */
2647 if (CONVERT_EXPR_CODE_P (subcode)
2648 && POINTER_TYPE_P (TREE_TYPE (lhs))
2649 && POINTER_TYPE_P (TREE_TYPE (op0))
2650 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2651 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2652 && TYPE_MODE (TREE_TYPE (lhs))
2653 == TYPE_MODE (TREE_TYPE (op0)))
2654 return op0;
2656 return
2657 fold_unary_ignore_overflow_loc (loc, subcode,
2658 gimple_expr_type (stmt), op0);
2661 case GIMPLE_BINARY_RHS:
2663 /* Handle binary operators that can appear in GIMPLE form. */
2664 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2665 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2667 /* Translate &x + CST into an invariant form suitable for
2668 further propagation. */
2669 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2670 && TREE_CODE (op0) == ADDR_EXPR
2671 && TREE_CODE (op1) == INTEGER_CST)
2673 tree off = fold_convert (ptr_type_node, op1);
2674 return build_fold_addr_expr_loc
2675 (loc,
2676 fold_build2 (MEM_REF,
2677 TREE_TYPE (TREE_TYPE (op0)),
2678 unshare_expr (op0), off));
2681 return fold_binary_loc (loc, subcode,
2682 gimple_expr_type (stmt), op0, op1);
2685 case GIMPLE_TERNARY_RHS:
2687 /* Handle ternary operators that can appear in GIMPLE form. */
2688 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2689 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2690 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2692 /* Fold embedded expressions in ternary codes. */
2693 if ((subcode == COND_EXPR
2694 || subcode == VEC_COND_EXPR)
2695 && COMPARISON_CLASS_P (op0))
2697 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2698 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2699 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2700 TREE_TYPE (op0), op00, op01);
2701 if (tem)
2702 op0 = tem;
2705 return fold_ternary_loc (loc, subcode,
2706 gimple_expr_type (stmt), op0, op1, op2);
2709 default:
2710 gcc_unreachable ();
2714 case GIMPLE_CALL:
2716 tree fn;
/* The UBSAN_CHECK_* internal functions are overflow-checked
   arithmetic; map each to the plain tree arithmetic code.  */
2718 if (gimple_call_internal_p (stmt))
2720 enum tree_code subcode = ERROR_MARK;
2721 switch (gimple_call_internal_fn (stmt))
2723 case IFN_UBSAN_CHECK_ADD:
2724 subcode = PLUS_EXPR;
2725 break;
2726 case IFN_UBSAN_CHECK_SUB:
2727 subcode = MINUS_EXPR;
2728 break;
2729 case IFN_UBSAN_CHECK_MUL:
2730 subcode = MULT_EXPR;
2731 break;
2732 default:
2733 return NULL_TREE;
2735 tree arg0 = gimple_call_arg (stmt, 0);
2736 tree arg1 = gimple_call_arg (stmt, 1);
2737 tree op0 = (*valueize) (arg0);
2738 tree op1 = (*valueize) (arg1);
2740 if (TREE_CODE (op0) != INTEGER_CST
2741 || TREE_CODE (op1) != INTEGER_CST)
2743 switch (subcode)
2745 case MULT_EXPR:
2746 /* x * 0 = 0 * x = 0 without overflow. */
2747 if (integer_zerop (op0) || integer_zerop (op1))
2748 return build_zero_cst (TREE_TYPE (arg0));
2749 break;
2750 case MINUS_EXPR:
2751 /* y - y = 0 without overflow. */
2752 if (operand_equal_p (op0, op1, 0))
2753 return build_zero_cst (TREE_TYPE (arg0));
2754 break;
2755 default:
2756 break;
2759 tree res
2760 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
/* Only fold when the constant result provably does not overflow;
   otherwise the runtime check must be kept.  */
2761 if (res
2762 && TREE_CODE (res) == INTEGER_CST
2763 && !TREE_OVERFLOW (res))
2764 return res;
2765 return NULL_TREE;
2768 fn = (*valueize) (gimple_call_fn (stmt));
2769 if (TREE_CODE (fn) == ADDR_EXPR
2770 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2771 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
2772 && gimple_builtin_call_types_compatible_p (stmt,
2773 TREE_OPERAND (fn, 0)))
2775 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2776 tree call, retval;
2777 unsigned i;
2778 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2779 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2780 call = build_call_array_loc (loc,
2781 gimple_call_return_type (stmt),
2782 fn, gimple_call_num_args (stmt), args);
2783 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2784 if (retval)
2786 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2787 STRIP_NOPS (retval);
2788 retval = fold_convert (gimple_call_return_type (stmt), retval);
2790 return retval;
2792 return NULL_TREE;
2795 default:
2796 return NULL_TREE;
2800 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2801 Returns NULL_TREE if folding to a constant is not possible, otherwise
2802 returns a constant according to is_gimple_min_invariant. */
2804 tree
2805 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2807 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2808 if (res && is_gimple_min_invariant (res))
2809 return res;
2810 return NULL_TREE;
2814 /* The following set of functions are supposed to fold references using
2815 their constant initializers. */
2817 static tree fold_ctor_reference (tree type, tree ctor,
2818 unsigned HOST_WIDE_INT offset,
2819 unsigned HOST_WIDE_INT size, tree);
2821 /* See if we can find constructor defining value of BASE.
2822 When we know the constructor with constant offset (such as
2823 base is array[40] and we do know constructor of array), then
2824 BIT_OFFSET is adjusted accordingly.
2826 As a special case, return error_mark_node when constructor
2827 is not explicitly available, but it is known to be zero
2828 such as 'static const int a;'. */
/* BIT_OFFSET is updated in place as the walk peels MEM_REFs and
   nested references; VALUEIZE (may be NULL) maps SSA names to
   known values.  */
2829 static tree
2830 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2831 tree (*valueize)(tree))
2833 HOST_WIDE_INT bit_offset2, size, max_size;
2834 if (TREE_CODE (base) == MEM_REF)
2836 if (!integer_zerop (TREE_OPERAND (base, 1)))
2838 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
2839 return NULL_TREE;
/* The MEM_REF offset is in bytes; accumulate it in bits.  */
2840 *bit_offset += (mem_ref_offset (base).low
2841 * BITS_PER_UNIT)
2844 if (valueize
2845 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2846 base = valueize (TREE_OPERAND (base, 0));
2847 if (!base || TREE_CODE (base) != ADDR_EXPR)
2848 return NULL_TREE;
2849 base = TREE_OPERAND (base, 0);
2852 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2853 DECL_INITIAL. If BASE is a nested reference into another
2854 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2855 the inner reference. */
2856 switch (TREE_CODE (base))
2858 case VAR_DECL:
2859 case CONST_DECL:
2861 tree init = ctor_for_folding (base);
2863 /* Our semantic is exact opposite of ctor_for_folding;
2864 NULL means unknown, while error_mark_node is 0. */
2865 if (init == error_mark_node)
2866 return NULL_TREE;
2867 if (!init)
2868 return error_mark_node;
2869 return init;
2872 case ARRAY_REF:
2873 case COMPONENT_REF:
/* Only exact, constant-extent references can be looked through.  */
2874 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2875 if (max_size == -1 || size != max_size)
2876 return NULL_TREE;
2877 *bit_offset += bit_offset2;
2878 return get_base_constructor (base, bit_offset, valueize);
2880 case STRING_CST:
2881 case CONSTRUCTOR:
2882 return base;
2884 default:
2885 return NULL_TREE;
2889 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2890 to the memory at bit OFFSET.
2892 We do only simple job of folding byte accesses. */
/* Returns NULL_TREE unless the access is a byte-aligned read of a
   single character whose mode matches the string's element mode.  */
2894 static tree
2895 fold_string_cst_ctor_reference (tree type, tree ctor,
2896 unsigned HOST_WIDE_INT offset,
2897 unsigned HOST_WIDE_INT size)
2899 if (INTEGRAL_TYPE_P (type)
2900 && (TYPE_MODE (type)
2901 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2902 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2903 == MODE_INT)
2904 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2905 && size == BITS_PER_UNIT
2906 && !(offset % BITS_PER_UNIT))
/* Convert the bit offset to a character index.  */
2908 offset /= BITS_PER_UNIT;
2909 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2910 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2911 [offset]));
2912 /* Folding
2913 const char a[20]="hello";
2914 return a[10];
2916 might lead to offset greater than string length. In this case we
2917 know value is either initialized to 0 or out of bounds. Return 0
2918 in both cases. */
2919 return build_zero_cst (type);
2921 return NULL_TREE;
2924 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2925 SIZE to the memory at bit OFFSET. */
/* FROM_DECL is the declaration the constructor came from and is only
   passed through to the recursive fold_ctor_reference call.  */
2927 static tree
2928 fold_array_ctor_reference (tree type, tree ctor,
2929 unsigned HOST_WIDE_INT offset,
2930 unsigned HOST_WIDE_INT size,
2931 tree from_decl)
2933 unsigned HOST_WIDE_INT cnt;
2934 tree cfield, cval;
2935 double_int low_bound, elt_size;
2936 double_int index, max_index;
2937 double_int access_index;
2938 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2939 HOST_WIDE_INT inner_offset;
2941 /* Compute low bound and elt size. */
2942 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2943 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2944 if (domain_type && TYPE_MIN_VALUE (domain_type))
2946 /* Static constructors for variably sized objects makes no sense. */
2947 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2948 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2949 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2951 else
2952 low_bound = double_int_zero;
2953 /* Static constructors for variably sized objects makes no sense. */
2954 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2955 == INTEGER_CST);
2956 elt_size =
2957 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2960 /* We can handle only constantly sized accesses that are known to not
2961 be larger than size of array element. */
2962 if (!TYPE_SIZE_UNIT (type)
2963 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2964 || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type)))
2965 || elt_size.is_zero ())
2966 return NULL_TREE;
2968 /* Compute the array index we look for. */
2969 access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
2970 .udiv (elt_size, TRUNC_DIV_EXPR);
2971 access_index += low_bound;
/* Wrap the index to the precision/signedness of the array's
   index type before comparing against constructor indices.  */
2972 if (index_type)
2973 access_index = access_index.ext (TYPE_PRECISION (index_type),
2974 TYPE_UNSIGNED (index_type));
2976 /* And offset within the access. */
2977 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2979 /* See if the array field is large enough to span whole access. We do not
2980 care to fold accesses spanning multiple array indexes. */
2981 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
2982 return NULL_TREE;
/* Start one below the low bound so that an element with a NULL
   index slots in as "previous index + 1".  */
2984 index = low_bound - double_int_one;
2985 if (index_type)
2986 index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));
2988 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2990 /* Array constructor might explicitly set index, or specify range
2991 or leave index NULL meaning that it is next index after previous
2992 one. */
2993 if (cfield)
2995 if (TREE_CODE (cfield) == INTEGER_CST)
2996 max_index = index = tree_to_double_int (cfield);
2997 else
2999 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
3000 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
3001 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
3004 else
3006 index += double_int_one;
3007 if (index_type)
3008 index = index.ext (TYPE_PRECISION (index_type),
3009 TYPE_UNSIGNED (index_type));
3010 max_index = index;
3013 /* Do we have match? */
3014 if (access_index.cmp (index, 1) >= 0
3015 && access_index.cmp (max_index, 1) <= 0)
3016 return fold_ctor_reference (type, cval, inner_offset, size,
3017 from_decl);
3019 /* When memory is not explicitly mentioned in constructor,
3020 it is 0 (or out of range). */
3021 return build_zero_cst (type);
3024 /* CTOR is CONSTRUCTOR of an aggregate or vector.
3025 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
/* Walks the constructor's fields looking for the one that overlaps
   [OFFSET, OFFSET + SIZE); FROM_DECL is passed through to the
   recursive fold_ctor_reference call.  */
3027 static tree
3028 fold_nonarray_ctor_reference (tree type, tree ctor,
3029 unsigned HOST_WIDE_INT offset,
3030 unsigned HOST_WIDE_INT size,
3031 tree from_decl)
3033 unsigned HOST_WIDE_INT cnt;
3034 tree cfield, cval;
3036 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
3037 cval)
3039 tree byte_offset = DECL_FIELD_OFFSET (cfield);
3040 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
3041 tree field_size = DECL_SIZE (cfield);
3042 double_int bitoffset;
3043 double_int byte_offset_cst = tree_to_double_int (byte_offset);
3044 double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
3045 double_int bitoffset_end, access_end;
3047 /* Variable sized objects in static constructors makes no sense,
3048 but field_size can be NULL for flexible array members. */
3049 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
3050 && TREE_CODE (byte_offset) == INTEGER_CST
3051 && (field_size != NULL_TREE
3052 ? TREE_CODE (field_size) == INTEGER_CST
3053 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE))
3055 /* Compute bit offset of the field. */
3056 bitoffset = tree_to_double_int (field_offset)
3057 + byte_offset_cst * bits_per_unit_cst;
3058 /* Compute bit offset where the field ends. */
3059 if (field_size != NULL_TREE)
3060 bitoffset_end = bitoffset + tree_to_double_int (field_size);
3061 else
3062 bitoffset_end = double_int_zero;
3064 access_end = double_int::from_uhwi (offset)
3065 + double_int::from_uhwi (size);
3067 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
3068 [BITOFFSET, BITOFFSET_END)? */
3069 if (access_end.cmp (bitoffset, 0) > 0
3070 && (field_size == NULL_TREE
3071 || double_int::from_uhwi (offset).slt (bitoffset_end)))
3073 double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
3074 /* We do have overlap. Now see if field is large enough to
3075 cover the access. Give up for accesses spanning multiple
3076 fields. */
3077 if (access_end.cmp (bitoffset_end, 0) > 0)
3078 return NULL_TREE;
3079 if (double_int::from_uhwi (offset).slt (bitoffset))
3080 return NULL_TREE;
3081 return fold_ctor_reference (type, cval,
3082 inner_offset.to_uhwi (), size,
3083 from_decl);
3086 /* When memory is not explicitly mentioned in constructor, it is 0. */
3087 return build_zero_cst (type);
3090 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
3091 to the memory at bit OFFSET. */
/* Dispatches on the kind of CTOR: exact-match values are canonicalized
   directly, scalars may be view-converted, STRING_CSTs and
   CONSTRUCTORs go to their dedicated helpers.  Returns NULL_TREE
   when the reference cannot be folded.  */
3093 static tree
3094 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3095 unsigned HOST_WIDE_INT size, tree from_decl)
3097 tree ret;
3099 /* We found the field with exact match. */
3100 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3101 && !offset)
3102 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
3104 /* We are at the end of walk, see if we can view convert the
3105 result. */
3106 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3107 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3108 && !compare_tree_int (TYPE_SIZE (type), size)
3109 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
3111 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
3112 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3113 if (ret)
3114 STRIP_NOPS (ret);
3115 return ret;
3117 if (TREE_CODE (ctor) == STRING_CST)
3118 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3119 if (TREE_CODE (ctor) == CONSTRUCTOR)
/* Vectors use the array-style (indexed) constructor layout too.  */
3122 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
3123 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
3124 return fold_array_ctor_reference (type, ctor, offset, size,
3125 from_decl);
3126 else
3127 return fold_nonarray_ctor_reference (type, ctor, offset, size,
3128 from_decl);
3131 return NULL_TREE;
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  /* Folding a volatile load would drop the side effect; punt.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  /* A bare declaration: use its known constant value, if any.  */
  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  /* Reads from string literals fold directly.  */
  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;
	  double_int doffset;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size))
	      && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
			    .sext (TYPE_PRECISION (TREE_TYPE (idx))),
		  doffset.fits_shwi ()))
	    {
	      /* Bit offset of the indexed element from the base:
		 (idx - low_bound) * element-size * BITS_PER_UNIT.  */
	      offset = doffset.to_shwi ();
	      offset *= tree_to_uhwi (unit_size);
	      offset *= BITS_PER_UNIT;

	      base = TREE_OPERAND (t, 0);
	      ctor = get_base_constructor (base, &offset, valueize);
	      /* Empty constructor.  Always fold to 0.  */
	      if (ctor == error_mark_node)
		return build_zero_cst (TREE_TYPE (t));
	      /* Out of bound array access.  Value is undefined,
		 but don't fold.  */
	      if (offset < 0)
		return NULL_TREE;
	      /* We can not determine ctor.  */
	      if (!ctor)
		return NULL_TREE;
	      return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					  tree_to_uhwi (unit_size)
					  * BITS_PER_UNIT,
					  base);
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address (max_size == -1 means variable
	 offset; max_size != size means variable-sized access).  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex-valued operand first, then extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_const_aggregate_ref_1, but with no SSA-name valueization
   (only references that are already constant can fold).  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Outside of LTO a malformed vtable should never reach here.  */
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  /* Convert OFFSET plus TOKEN entries into an absolute bit offset
     into the vtable array.  */
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* Decompose the vtable pointer expression into the vtable decl V and
     the byte OFFSET into it; fail if it is not in recognizable form.  */
  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	  /* Otherwise fall through to the default and return false.  */
	default:
	  return false;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    /* Builtins whose result range is inherently non-negative.  */
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  /* N must be even and must round-trip exactly back to
		     C, otherwise the exponent was not an even integer.  */
		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      return false;
	    }
	}
    }

  return false;
}
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  /* Only a constant lower bound makes a valid constant index.  */
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ... */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold if the element is within the vector's bounds.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  /* The offset must be exactly one element (the real part's
	     size) for the access to name the imaginary part.  */
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    build_int_cst_wide (ptype,
						TREE_INT_CST_LOW (off),
						TREE_INT_CST_HIGH (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
3646 /* Return true if CODE is an operation that when operating on signed
3647 integer types involves undefined behavior on overflow and the
3648 operation can be expressed with unsigned arithmetic. */
3650 bool
3651 arith_code_with_undefined_signed_overflow (tree_code code)
3653 switch (code)
3655 case PLUS_EXPR:
3656 case MINUS_EXPR:
3657 case MULT_EXPR:
3658 case NEGATE_EXPR:
3659 case POINTER_PLUS_EXPR:
3660 return true;
3661 default:
3662 return false;
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand of STMT (in place) to the unsigned type,
     collecting any statements the conversion requires.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  /* Give STMT a fresh unsigned LHS; the original LHS is restored by the
     conversion statement appended below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* Pointer arithmetic becomes plain unsigned addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original signed/pointer
     type into the original LHS.  */
  gimple cvt = gimple_build_assign_with_ops
      (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}