* config/darwin.c (darwin_assemble_visibility): Treat
[official-gcc.git] / gcc / gimple-fold.c
blob66d076664cc3cadda3d2a7b67af77840c44733f5
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "dumpfile.h"
29 #include "tree-flow.h"
30 #include "tree-ssa-propagate.h"
31 #include "target.h"
32 #include "gimple-fold.h"
34 /* Return true when DECL can be referenced from current unit.
35 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
36 We can get declarations that are not possible to reference for various
37 reasons:
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
44 set.
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsehwere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
49 declaring the body.
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final copmilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
54 directly. */
56 static bool
57 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
61 symtab_node snode;
63 /* We will later output the initializer, so we can refer to it.
64 So we are concerned only when DECL comes from initializer of
65 external var. */
66 if (!from_decl
67 || TREE_CODE (from_decl) != VAR_DECL
68 || !DECL_EXTERNAL (from_decl)
69 || (flag_ltrans
70 && symtab_get_node (from_decl)->symbol.in_other_partition))
71 return true;
72 /* We are concerned only about static/external vars and functions. */
73 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
74 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
75 return true;
76 /* Weakrefs have somewhat confusing DECL_EXTERNAL flag set; they
77 are always safe. */
78 if (DECL_EXTERNAL (decl)
79 && lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
80 return true;
81 /* We are folding reference from external vtable. The vtable may reffer
82 to a symbol keyed to other compilation unit. The other compilation
83 unit may be in separate DSO and the symbol may be hidden. */
84 if (DECL_VISIBILITY_SPECIFIED (decl)
85 && DECL_EXTERNAL (decl)
86 && (!(snode = symtab_get_node (decl)) || !snode->symbol.in_other_partition))
87 return false;
88 /* When function is public, we always can introduce new reference.
89 Exception are the COMDAT functions where introducing a direct
90 reference imply need to include function body in the curren tunit. */
91 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
92 return true;
93 /* We are not at ltrans stage; so don't worry about WHOPR.
94 Also when still gimplifying all referred comdat functions will be
95 produced.
97 As observed in PR20991 for already optimized out comdat virtual functions
98 it may be tempting to not necessarily give up because the copy will be
99 output elsewhere when corresponding vtable is output.
100 This is however not possible - ABI specify that COMDATs are output in
101 units where they are used and when the other unit was compiled with LTO
102 it is possible that vtable was kept public while the function itself
103 was privatized. */
104 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
105 return true;
107 /* OK we are seeing either COMDAT or static variable. In this case we must
108 check that the definition is still around so we can refer it. */
109 if (TREE_CODE (decl) == FUNCTION_DECL)
111 node = cgraph_get_node (decl);
112 /* Check that we still have function body and that we didn't took
113 the decision to eliminate offline copy of the function yet.
114 The second is important when devirtualization happens during final
115 compilation stage when making a new reference no longer makes callee
116 to be compiled. */
117 if (!node || !node->analyzed || node->global.inlined_to)
119 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
120 return false;
123 else if (TREE_CODE (decl) == VAR_DECL)
125 vnode = varpool_get_node (decl);
126 if (!vnode || !vnode->analyzed)
128 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
129 return false;
132 return true;
135 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
136 acceptable form for is_gimple_min_invariant.
137 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
139 tree
140 canonicalize_constructor_val (tree cval, tree from_decl)
142 STRIP_USELESS_TYPE_CONVERSION (cval);
143 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
144 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
146 tree ptr = TREE_OPERAND (cval, 0);
147 if (is_gimple_min_invariant (ptr))
148 cval = build1_loc (EXPR_LOCATION (cval),
149 ADDR_EXPR, TREE_TYPE (ptr),
150 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
151 ptr,
152 fold_convert (ptr_type_node,
153 TREE_OPERAND (cval, 1))));
155 if (TREE_CODE (cval) == ADDR_EXPR)
157 tree base = NULL_TREE;
158 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
160 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
161 if (base)
162 TREE_OPERAND (cval, 0) = base;
164 else
165 base = get_base_address (TREE_OPERAND (cval, 0));
166 if (!base)
167 return NULL_TREE;
169 if ((TREE_CODE (base) == VAR_DECL
170 || TREE_CODE (base) == FUNCTION_DECL)
171 && !can_refer_decl_in_current_unit_p (base, from_decl))
172 return NULL_TREE;
173 if (TREE_CODE (base) == VAR_DECL)
174 TREE_ADDRESSABLE (base) = 1;
175 else if (TREE_CODE (base) == FUNCTION_DECL)
177 /* Make sure we create a cgraph node for functions we'll reference.
178 They can be non-existent if the reference comes from an entry
179 of an external vtable for example. */
180 cgraph_get_create_node (base);
182 /* Fixup types in global initializers. */
183 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
184 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
186 return cval;
189 /* If SYM is a constant variable with known value, return the value.
190 NULL_TREE is returned otherwise. */
192 tree
193 get_symbol_constant_value (tree sym)
195 if (const_value_known_p (sym))
197 tree val = DECL_INITIAL (sym);
198 if (val)
200 val = canonicalize_constructor_val (val, sym);
201 if (val && is_gimple_min_invariant (val))
202 return val;
203 else
204 return NULL_TREE;
206 /* Variables declared 'const' without an initializer
207 have zero as the initializer if they may not be
208 overridden at link or run time. */
209 if (!val
210 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
211 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
212 return build_zero_cst (TREE_TYPE (sym));
215 return NULL_TREE;
220 /* Subroutine of fold_stmt. We perform several simplifications of the
221 memory reference tree EXPR and make sure to re-gimplify them properly
222 after propagation of constant addresses. IS_LHS is true if the
223 reference is supposed to be an lvalue. */
225 static tree
226 maybe_fold_reference (tree expr, bool is_lhs)
228 tree *t = &expr;
229 tree result;
231 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
232 || TREE_CODE (expr) == REALPART_EXPR
233 || TREE_CODE (expr) == IMAGPART_EXPR)
234 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
235 return fold_unary_loc (EXPR_LOCATION (expr),
236 TREE_CODE (expr),
237 TREE_TYPE (expr),
238 TREE_OPERAND (expr, 0));
239 else if (TREE_CODE (expr) == BIT_FIELD_REF
240 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
241 return fold_ternary_loc (EXPR_LOCATION (expr),
242 TREE_CODE (expr),
243 TREE_TYPE (expr),
244 TREE_OPERAND (expr, 0),
245 TREE_OPERAND (expr, 1),
246 TREE_OPERAND (expr, 2));
248 while (handled_component_p (*t))
249 t = &TREE_OPERAND (*t, 0);
251 /* Canonicalize MEM_REFs invariant address operand. Do this first
252 to avoid feeding non-canonical MEM_REFs elsewhere. */
253 if (TREE_CODE (*t) == MEM_REF
254 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
256 bool volatile_p = TREE_THIS_VOLATILE (*t);
257 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
258 TREE_OPERAND (*t, 0),
259 TREE_OPERAND (*t, 1));
260 if (tem)
262 TREE_THIS_VOLATILE (tem) = volatile_p;
263 *t = tem;
264 tem = maybe_fold_reference (expr, is_lhs);
265 if (tem)
266 return tem;
267 return expr;
271 if (!is_lhs
272 && (result = fold_const_aggregate_ref (expr))
273 && is_gimple_min_invariant (result))
274 return result;
276 /* Fold back MEM_REFs to reference trees. */
277 if (TREE_CODE (*t) == MEM_REF
278 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
279 && integer_zerop (TREE_OPERAND (*t, 1))
280 && (TREE_THIS_VOLATILE (*t)
281 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
282 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
283 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
284 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
285 /* We have to look out here to not drop a required conversion
286 from the rhs to the lhs if is_lhs, but we don't have the
287 rhs here to verify that. Thus require strict type
288 compatibility. */
289 && types_compatible_p (TREE_TYPE (*t),
290 TREE_TYPE (TREE_OPERAND
291 (TREE_OPERAND (*t, 0), 0))))
293 tree tem;
294 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
295 tem = maybe_fold_reference (expr, is_lhs);
296 if (tem)
297 return tem;
298 return expr;
300 else if (TREE_CODE (*t) == TARGET_MEM_REF)
302 tree tem = maybe_fold_tmr (*t);
303 if (tem)
305 *t = tem;
306 tem = maybe_fold_reference (expr, is_lhs);
307 if (tem)
308 return tem;
309 return expr;
313 return NULL_TREE;
317 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
318 replacement rhs for the statement or NULL_TREE if no simplification
319 could be made. It is assumed that the operands have been previously
320 folded. */
322 static tree
323 fold_gimple_assign (gimple_stmt_iterator *si)
325 gimple stmt = gsi_stmt (*si);
326 enum tree_code subcode = gimple_assign_rhs_code (stmt);
327 location_t loc = gimple_location (stmt);
329 tree result = NULL_TREE;
331 switch (get_gimple_rhs_class (subcode))
333 case GIMPLE_SINGLE_RHS:
335 tree rhs = gimple_assign_rhs1 (stmt);
337 if (REFERENCE_CLASS_P (rhs))
338 return maybe_fold_reference (rhs, false);
340 else if (TREE_CODE (rhs) == ADDR_EXPR)
342 tree ref = TREE_OPERAND (rhs, 0);
343 tree tem = maybe_fold_reference (ref, true);
344 if (tem
345 && TREE_CODE (tem) == MEM_REF
346 && integer_zerop (TREE_OPERAND (tem, 1)))
347 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
348 else if (tem)
349 result = fold_convert (TREE_TYPE (rhs),
350 build_fold_addr_expr_loc (loc, tem));
351 else if (TREE_CODE (ref) == MEM_REF
352 && integer_zerop (TREE_OPERAND (ref, 1)))
353 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
356 else if (TREE_CODE (rhs) == CONSTRUCTOR
357 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
358 && (CONSTRUCTOR_NELTS (rhs)
359 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
361 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
362 unsigned i;
363 tree val;
365 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
366 if (TREE_CODE (val) != INTEGER_CST
367 && TREE_CODE (val) != REAL_CST
368 && TREE_CODE (val) != FIXED_CST)
369 return NULL_TREE;
371 return build_vector_from_ctor (TREE_TYPE (rhs),
372 CONSTRUCTOR_ELTS (rhs));
375 else if (DECL_P (rhs))
376 return unshare_expr (get_symbol_constant_value (rhs));
378 /* If we couldn't fold the RHS, hand over to the generic
379 fold routines. */
380 if (result == NULL_TREE)
381 result = fold (rhs);
383 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
384 that may have been added by fold, and "useless" type
385 conversions that might now be apparent due to propagation. */
386 STRIP_USELESS_TYPE_CONVERSION (result);
388 if (result != rhs && valid_gimple_rhs_p (result))
389 return result;
391 return NULL_TREE;
393 break;
395 case GIMPLE_UNARY_RHS:
397 tree rhs = gimple_assign_rhs1 (stmt);
399 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
400 if (result)
402 /* If the operation was a conversion do _not_ mark a
403 resulting constant with TREE_OVERFLOW if the original
404 constant was not. These conversions have implementation
405 defined behavior and retaining the TREE_OVERFLOW flag
406 here would confuse later passes such as VRP. */
407 if (CONVERT_EXPR_CODE_P (subcode)
408 && TREE_CODE (result) == INTEGER_CST
409 && TREE_CODE (rhs) == INTEGER_CST)
410 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
412 STRIP_USELESS_TYPE_CONVERSION (result);
413 if (valid_gimple_rhs_p (result))
414 return result;
417 break;
419 case GIMPLE_BINARY_RHS:
420 /* Try to canonicalize for boolean-typed X the comparisons
421 X == 0, X == 1, X != 0, and X != 1. */
422 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
423 || gimple_assign_rhs_code (stmt) == NE_EXPR)
425 tree lhs = gimple_assign_lhs (stmt);
426 tree op1 = gimple_assign_rhs1 (stmt);
427 tree op2 = gimple_assign_rhs2 (stmt);
428 tree type = TREE_TYPE (op1);
430 /* Check whether the comparison operands are of the same boolean
431 type as the result type is.
432 Check that second operand is an integer-constant with value
433 one or zero. */
434 if (TREE_CODE (op2) == INTEGER_CST
435 && (integer_zerop (op2) || integer_onep (op2))
436 && useless_type_conversion_p (TREE_TYPE (lhs), type))
438 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
439 bool is_logical_not = false;
441 /* X == 0 and X != 1 is a logical-not.of X
442 X == 1 and X != 0 is X */
443 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
444 || (cmp_code == NE_EXPR && integer_onep (op2)))
445 is_logical_not = true;
447 if (is_logical_not == false)
448 result = op1;
449 /* Only for one-bit precision typed X the transformation
450 !X -> ~X is valied. */
451 else if (TYPE_PRECISION (type) == 1)
452 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
453 type, op1);
454 /* Otherwise we use !X -> X ^ 1. */
455 else
456 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
457 type, op1, build_int_cst (type, 1));
462 if (!result)
463 result = fold_binary_loc (loc, subcode,
464 TREE_TYPE (gimple_assign_lhs (stmt)),
465 gimple_assign_rhs1 (stmt),
466 gimple_assign_rhs2 (stmt));
468 if (result)
470 STRIP_USELESS_TYPE_CONVERSION (result);
471 if (valid_gimple_rhs_p (result))
472 return result;
474 break;
476 case GIMPLE_TERNARY_RHS:
477 /* Try to fold a conditional expression. */
478 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
480 tree op0 = gimple_assign_rhs1 (stmt);
481 tree tem;
482 bool set = false;
483 location_t cond_loc = gimple_location (stmt);
485 if (COMPARISON_CLASS_P (op0))
487 fold_defer_overflow_warnings ();
488 tem = fold_binary_loc (cond_loc,
489 TREE_CODE (op0), TREE_TYPE (op0),
490 TREE_OPERAND (op0, 0),
491 TREE_OPERAND (op0, 1));
492 /* This is actually a conditional expression, not a GIMPLE
493 conditional statement, however, the valid_gimple_rhs_p
494 test still applies. */
495 set = (tem && is_gimple_condexpr (tem)
496 && valid_gimple_rhs_p (tem));
497 fold_undefer_overflow_warnings (set, stmt, 0);
499 else if (is_gimple_min_invariant (op0))
501 tem = op0;
502 set = true;
504 else
505 return NULL_TREE;
507 if (set)
508 result = fold_build3_loc (cond_loc, COND_EXPR,
509 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
510 gimple_assign_rhs2 (stmt),
511 gimple_assign_rhs3 (stmt));
514 if (!result)
515 result = fold_ternary_loc (loc, subcode,
516 TREE_TYPE (gimple_assign_lhs (stmt)),
517 gimple_assign_rhs1 (stmt),
518 gimple_assign_rhs2 (stmt),
519 gimple_assign_rhs3 (stmt));
521 if (result)
523 STRIP_USELESS_TYPE_CONVERSION (result);
524 if (valid_gimple_rhs_p (result))
525 return result;
527 break;
529 case GIMPLE_INVALID_RHS:
530 gcc_unreachable ();
533 return NULL_TREE;
536 /* Attempt to fold a conditional statement. Return true if any changes were
537 made. We only attempt to fold the condition expression, and do not perform
538 any transformation that would require alteration of the cfg. It is
539 assumed that the operands have been previously folded. */
541 static bool
542 fold_gimple_cond (gimple stmt)
544 tree result = fold_binary_loc (gimple_location (stmt),
545 gimple_cond_code (stmt),
546 boolean_type_node,
547 gimple_cond_lhs (stmt),
548 gimple_cond_rhs (stmt));
550 if (result)
552 STRIP_USELESS_TYPE_CONVERSION (result);
553 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
555 gimple_cond_set_condition_from_tree (stmt, result);
556 return true;
560 return false;
563 /* Convert EXPR into a GIMPLE value suitable for substitution on the
564 RHS of an assignment. Insert the necessary statements before
565 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
566 is replaced. If the call is expected to produces a result, then it
567 is replaced by an assignment of the new RHS to the result variable.
568 If the result is to be ignored, then the call is replaced by a
569 GIMPLE_NOP. A proper VDEF chain is retained by making the first
570 VUSE and the last VDEF of the whole sequence be the same as the replaced
571 statement and using new SSA names for stores in between. */
573 void
574 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
576 tree lhs;
577 gimple stmt, new_stmt;
578 gimple_stmt_iterator i;
579 gimple_seq stmts = NULL;
580 struct gimplify_ctx gctx;
581 gimple laststore;
582 tree reaching_vuse;
584 stmt = gsi_stmt (*si_p);
586 gcc_assert (is_gimple_call (stmt));
588 push_gimplify_context (&gctx);
589 gctx.into_ssa = gimple_in_ssa_p (cfun);
591 lhs = gimple_call_lhs (stmt);
592 if (lhs == NULL_TREE)
594 gimplify_and_add (expr, &stmts);
595 /* We can end up with folding a memcpy of an empty class assignment
596 which gets optimized away by C++ gimplification. */
597 if (gimple_seq_empty_p (stmts))
599 pop_gimplify_context (NULL);
600 if (gimple_in_ssa_p (cfun))
602 unlink_stmt_vdef (stmt);
603 release_defs (stmt);
605 gsi_remove (si_p, true);
606 return;
609 else
611 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
612 new_stmt = gimple_build_assign (lhs, tmp);
613 i = gsi_last (stmts);
614 gsi_insert_after_without_update (&i, new_stmt,
615 GSI_CONTINUE_LINKING);
618 pop_gimplify_context (NULL);
620 if (gimple_has_location (stmt))
621 annotate_all_with_location (stmts, gimple_location (stmt));
623 /* First iterate over the replacement statements backward, assigning
624 virtual operands to their defining statements. */
625 laststore = NULL;
626 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
628 new_stmt = gsi_stmt (i);
629 if ((gimple_assign_single_p (new_stmt)
630 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
631 || (is_gimple_call (new_stmt)
632 && (gimple_call_flags (new_stmt)
633 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
635 tree vdef;
636 if (!laststore)
637 vdef = gimple_vdef (stmt);
638 else
639 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
640 gimple_set_vdef (new_stmt, vdef);
641 if (vdef && TREE_CODE (vdef) == SSA_NAME)
642 SSA_NAME_DEF_STMT (vdef) = new_stmt;
643 laststore = new_stmt;
647 /* Second iterate over the statements forward, assigning virtual
648 operands to their uses. */
649 reaching_vuse = gimple_vuse (stmt);
650 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
652 new_stmt = gsi_stmt (i);
653 /* If the new statement possibly has a VUSE, update it with exact SSA
654 name we know will reach this one. */
655 if (gimple_has_mem_ops (new_stmt))
656 gimple_set_vuse (new_stmt, reaching_vuse);
657 gimple_set_modified (new_stmt, true);
658 if (gimple_vdef (new_stmt))
659 reaching_vuse = gimple_vdef (new_stmt);
662 /* If the new sequence does not do a store release the virtual
663 definition of the original statement. */
664 if (reaching_vuse
665 && reaching_vuse == gimple_vuse (stmt))
667 tree vdef = gimple_vdef (stmt);
668 if (vdef
669 && TREE_CODE (vdef) == SSA_NAME)
671 unlink_stmt_vdef (stmt);
672 release_ssa_name (vdef);
676 /* Finally replace the original statement with the sequence. */
677 gsi_replace_with_seq (si_p, stmts, false);
680 /* Return the string length, maximum string length or maximum value of
681 ARG in LENGTH.
682 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
683 is not NULL and, for TYPE == 0, its value is not equal to the length
684 we determine or if we are unable to determine the length or value,
685 return false. VISITED is a bitmap of visited variables.
686 TYPE is 0 if string length should be returned, 1 for maximum string
687 length and 2 for maximum value ARG can have. */
689 static bool
690 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
692 tree var, val;
693 gimple def_stmt;
695 if (TREE_CODE (arg) != SSA_NAME)
697 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
698 if (TREE_CODE (arg) == ADDR_EXPR
699 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
700 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
702 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
703 if (TREE_CODE (aop0) == INDIRECT_REF
704 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
705 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
706 length, visited, type);
709 if (type == 2)
711 val = arg;
712 if (TREE_CODE (val) != INTEGER_CST
713 || tree_int_cst_sgn (val) < 0)
714 return false;
716 else
717 val = c_strlen (arg, 1);
718 if (!val)
719 return false;
721 if (*length)
723 if (type > 0)
725 if (TREE_CODE (*length) != INTEGER_CST
726 || TREE_CODE (val) != INTEGER_CST)
727 return false;
729 if (tree_int_cst_lt (*length, val))
730 *length = val;
731 return true;
733 else if (simple_cst_equal (val, *length) != 1)
734 return false;
737 *length = val;
738 return true;
741 /* If ARG is registered for SSA update we cannot look at its defining
742 statement. */
743 if (name_registered_for_update_p (arg))
744 return false;
746 /* If we were already here, break the infinite cycle. */
747 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
748 return true;
750 var = arg;
751 def_stmt = SSA_NAME_DEF_STMT (var);
753 switch (gimple_code (def_stmt))
755 case GIMPLE_ASSIGN:
756 /* The RHS of the statement defining VAR must either have a
757 constant length or come from another SSA_NAME with a constant
758 length. */
759 if (gimple_assign_single_p (def_stmt)
760 || gimple_assign_unary_nop_p (def_stmt))
762 tree rhs = gimple_assign_rhs1 (def_stmt);
763 return get_maxval_strlen (rhs, length, visited, type);
765 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
767 tree op2 = gimple_assign_rhs2 (def_stmt);
768 tree op3 = gimple_assign_rhs3 (def_stmt);
769 return get_maxval_strlen (op2, length, visited, type)
770 && get_maxval_strlen (op3, length, visited, type);
772 return false;
774 case GIMPLE_PHI:
776 /* All the arguments of the PHI node must have the same constant
777 length. */
778 unsigned i;
780 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
782 tree arg = gimple_phi_arg (def_stmt, i)->def;
784 /* If this PHI has itself as an argument, we cannot
785 determine the string length of this argument. However,
786 if we can find a constant string length for the other
787 PHI args then we can still be sure that this is a
788 constant string length. So be optimistic and just
789 continue with the next argument. */
790 if (arg == gimple_phi_result (def_stmt))
791 continue;
793 if (!get_maxval_strlen (arg, length, visited, type))
794 return false;
797 return true;
799 default:
800 return false;
805 /* Fold builtin call in statement STMT. Returns a simplified tree.
806 We may return a non-constant expression, including another call
807 to a different function and with different arguments, e.g.,
808 substituting memcpy for strcpy when the string length is known.
809 Note that some builtins expand into inline code that may not
810 be valid in GIMPLE. Callers must take care. */
812 tree
813 gimple_fold_builtin (gimple stmt)
815 tree result, val[3];
816 tree callee, a;
817 int arg_idx, type;
818 bitmap visited;
819 bool ignore;
820 int nargs;
821 location_t loc = gimple_location (stmt);
823 gcc_assert (is_gimple_call (stmt));
825 ignore = (gimple_call_lhs (stmt) == NULL);
827 /* First try the generic builtin folder. If that succeeds, return the
828 result directly. */
829 result = fold_call_stmt (stmt, ignore);
830 if (result)
832 if (ignore)
833 STRIP_NOPS (result);
834 return result;
837 /* Ignore MD builtins. */
838 callee = gimple_call_fndecl (stmt);
839 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
840 return NULL_TREE;
842 /* Give up for always_inline inline builtins until they are
843 inlined. */
844 if (avoid_folding_inline_builtin (callee))
845 return NULL_TREE;
847 /* If the builtin could not be folded, and it has no argument list,
848 we're done. */
849 nargs = gimple_call_num_args (stmt);
850 if (nargs == 0)
851 return NULL_TREE;
853 /* Limit the work only for builtins we know how to simplify. */
854 switch (DECL_FUNCTION_CODE (callee))
856 case BUILT_IN_STRLEN:
857 case BUILT_IN_FPUTS:
858 case BUILT_IN_FPUTS_UNLOCKED:
859 arg_idx = 0;
860 type = 0;
861 break;
862 case BUILT_IN_STRCPY:
863 case BUILT_IN_STRNCPY:
864 arg_idx = 1;
865 type = 0;
866 break;
867 case BUILT_IN_MEMCPY_CHK:
868 case BUILT_IN_MEMPCPY_CHK:
869 case BUILT_IN_MEMMOVE_CHK:
870 case BUILT_IN_MEMSET_CHK:
871 case BUILT_IN_STRNCPY_CHK:
872 case BUILT_IN_STPNCPY_CHK:
873 arg_idx = 2;
874 type = 2;
875 break;
876 case BUILT_IN_STRCPY_CHK:
877 case BUILT_IN_STPCPY_CHK:
878 arg_idx = 1;
879 type = 1;
880 break;
881 case BUILT_IN_SNPRINTF_CHK:
882 case BUILT_IN_VSNPRINTF_CHK:
883 arg_idx = 1;
884 type = 2;
885 break;
886 default:
887 return NULL_TREE;
890 if (arg_idx >= nargs)
891 return NULL_TREE;
893 /* Try to use the dataflow information gathered by the CCP process. */
894 visited = BITMAP_ALLOC (NULL);
895 bitmap_clear (visited);
897 memset (val, 0, sizeof (val));
898 a = gimple_call_arg (stmt, arg_idx);
899 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
900 val[arg_idx] = NULL_TREE;
902 BITMAP_FREE (visited);
904 result = NULL_TREE;
905 switch (DECL_FUNCTION_CODE (callee))
907 case BUILT_IN_STRLEN:
908 if (val[0] && nargs == 1)
910 tree new_val =
911 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
913 /* If the result is not a valid gimple value, or not a cast
914 of a valid gimple value, then we cannot use the result. */
915 if (is_gimple_val (new_val)
916 || (CONVERT_EXPR_P (new_val)
917 && is_gimple_val (TREE_OPERAND (new_val, 0))))
918 return new_val;
920 break;
922 case BUILT_IN_STRCPY:
923 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
924 result = fold_builtin_strcpy (loc, callee,
925 gimple_call_arg (stmt, 0),
926 gimple_call_arg (stmt, 1),
927 val[1]);
928 break;
930 case BUILT_IN_STRNCPY:
931 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
932 result = fold_builtin_strncpy (loc, callee,
933 gimple_call_arg (stmt, 0),
934 gimple_call_arg (stmt, 1),
935 gimple_call_arg (stmt, 2),
936 val[1]);
937 break;
939 case BUILT_IN_FPUTS:
940 if (nargs == 2)
941 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
942 gimple_call_arg (stmt, 1),
943 ignore, false, val[0]);
944 break;
946 case BUILT_IN_FPUTS_UNLOCKED:
947 if (nargs == 2)
948 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
949 gimple_call_arg (stmt, 1),
950 ignore, true, val[0]);
951 break;
953 case BUILT_IN_MEMCPY_CHK:
954 case BUILT_IN_MEMPCPY_CHK:
955 case BUILT_IN_MEMMOVE_CHK:
956 case BUILT_IN_MEMSET_CHK:
957 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
958 result = fold_builtin_memory_chk (loc, callee,
959 gimple_call_arg (stmt, 0),
960 gimple_call_arg (stmt, 1),
961 gimple_call_arg (stmt, 2),
962 gimple_call_arg (stmt, 3),
963 val[2], ignore,
964 DECL_FUNCTION_CODE (callee));
965 break;
967 case BUILT_IN_STRCPY_CHK:
968 case BUILT_IN_STPCPY_CHK:
969 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
970 result = fold_builtin_stxcpy_chk (loc, callee,
971 gimple_call_arg (stmt, 0),
972 gimple_call_arg (stmt, 1),
973 gimple_call_arg (stmt, 2),
974 val[1], ignore,
975 DECL_FUNCTION_CODE (callee));
976 break;
978 case BUILT_IN_STRNCPY_CHK:
979 case BUILT_IN_STPNCPY_CHK:
980 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
981 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
982 gimple_call_arg (stmt, 1),
983 gimple_call_arg (stmt, 2),
984 gimple_call_arg (stmt, 3),
985 val[2], ignore,
986 DECL_FUNCTION_CODE (callee));
987 break;
989 case BUILT_IN_SNPRINTF_CHK:
990 case BUILT_IN_VSNPRINTF_CHK:
991 if (val[1] && is_gimple_val (val[1]))
992 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
993 DECL_FUNCTION_CODE (callee));
994 break;
996 default:
997 gcc_unreachable ();
1000 if (result && ignore)
1001 result = fold_ignored_result (result);
1002 return result;
1006 /* Return a binfo to be used for devirtualization of calls based on an object
1007 represented by a declaration (i.e. a global or automatically allocated one)
1008 or NULL if it cannot be found or is not safe. CST is expected to be an
1009 ADDR_EXPR of such object or the function will return NULL. Currently it is
1010 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
1012 tree
1013 gimple_extract_devirt_binfo_from_cst (tree cst)
1015 HOST_WIDE_INT offset, size, max_size;
1016 tree base, type, expected_type, binfo;
1017 bool last_artificial = false;
1019 if (!flag_devirtualize
1020 || TREE_CODE (cst) != ADDR_EXPR
1021 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1022 return NULL_TREE;
1024 cst = TREE_OPERAND (cst, 0);
1025 expected_type = TREE_TYPE (cst);
1026 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1027 type = TREE_TYPE (base);
1028 if (!DECL_P (base)
1029 || max_size == -1
1030 || max_size != size
1031 || TREE_CODE (type) != RECORD_TYPE)
1032 return NULL_TREE;
1034 /* Find the sub-object the constant actually refers to and mark whether it is
1035 an artificial one (as opposed to a user-defined one). */
1036 while (true)
1038 HOST_WIDE_INT pos, size;
1039 tree fld;
1041 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
1042 break;
1043 if (offset < 0)
1044 return NULL_TREE;
1046 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1048 if (TREE_CODE (fld) != FIELD_DECL)
1049 continue;
1051 pos = int_bit_position (fld);
1052 size = tree_low_cst (DECL_SIZE (fld), 1);
1053 if (pos <= offset && (pos + size) > offset)
1054 break;
1056 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1057 return NULL_TREE;
1059 last_artificial = DECL_ARTIFICIAL (fld);
1060 type = TREE_TYPE (fld);
1061 offset -= pos;
1063 /* Artificial sub-objects are ancestors, we do not want to use them for
1064 devirtualization, at least not here. */
1065 if (last_artificial)
1066 return NULL_TREE;
1067 binfo = TYPE_BINFO (type);
1068 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1069 return NULL_TREE;
1070 else
1071 return binfo;
1074 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1075 The statement may be replaced by another statement, e.g., if the call
1076 simplifies to a constant value. Return true if any changes were made.
1077 It is assumed that the operands have been previously folded. */
1079 static bool
1080 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1082 gimple stmt = gsi_stmt (*gsi);
1083 tree callee;
1084 bool changed = false;
1085 unsigned i;
1087 /* Fold *& in call arguments. */
1088 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1089 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1091 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1092 if (tmp)
1094 gimple_call_set_arg (stmt, i, tmp);
1095 changed = true;
1099 /* Check for virtual calls that became direct calls. */
1100 callee = gimple_call_fn (stmt);
1101 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1103 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1105 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1106 changed = true;
1108 else
1110 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1111 tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
1112 if (binfo)
1114 HOST_WIDE_INT token
1115 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1116 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1117 if (fndecl)
1119 gimple_call_set_fndecl (stmt, fndecl);
1120 changed = true;
1126 if (inplace)
1127 return changed;
1129 /* Check for builtins that CCP can handle using information not
1130 available in the generic fold routines. */
1131 callee = gimple_call_fndecl (stmt);
1132 if (callee && DECL_BUILT_IN (callee))
1134 tree result = gimple_fold_builtin (stmt);
1135 if (result)
1137 if (!update_call_from_tree (gsi, result))
1138 gimplify_and_update_call_from_tree (gsi, result);
1139 changed = true;
1143 return changed;
1146 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1147 distinguishes both cases. */
1149 static bool
1150 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1152 bool changed = false;
1153 gimple stmt = gsi_stmt (*gsi);
1154 unsigned i;
1155 gimple_stmt_iterator gsinext = *gsi;
1156 gimple next_stmt;
1158 gsi_next (&gsinext);
1159 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1161 /* Fold the main computation performed by the statement. */
1162 switch (gimple_code (stmt))
1164 case GIMPLE_ASSIGN:
1166 unsigned old_num_ops = gimple_num_ops (stmt);
1167 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1168 tree lhs = gimple_assign_lhs (stmt);
1169 tree new_rhs;
1170 /* First canonicalize operand order. This avoids building new
1171 trees if this is the only thing fold would later do. */
1172 if ((commutative_tree_code (subcode)
1173 || commutative_ternary_tree_code (subcode))
1174 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1175 gimple_assign_rhs2 (stmt), false))
1177 tree tem = gimple_assign_rhs1 (stmt);
1178 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1179 gimple_assign_set_rhs2 (stmt, tem);
1180 changed = true;
1182 new_rhs = fold_gimple_assign (gsi);
1183 if (new_rhs
1184 && !useless_type_conversion_p (TREE_TYPE (lhs),
1185 TREE_TYPE (new_rhs)))
1186 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1187 if (new_rhs
1188 && (!inplace
1189 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1191 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1192 changed = true;
1194 break;
1197 case GIMPLE_COND:
1198 changed |= fold_gimple_cond (stmt);
1199 break;
1201 case GIMPLE_CALL:
1202 changed |= gimple_fold_call (gsi, inplace);
1203 break;
1205 case GIMPLE_ASM:
1206 /* Fold *& in asm operands. */
1208 size_t noutputs;
1209 const char **oconstraints;
1210 const char *constraint;
1211 bool allows_mem, allows_reg;
1213 noutputs = gimple_asm_noutputs (stmt);
1214 oconstraints = XALLOCAVEC (const char *, noutputs);
1216 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1218 tree link = gimple_asm_output_op (stmt, i);
1219 tree op = TREE_VALUE (link);
1220 oconstraints[i]
1221 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1222 if (REFERENCE_CLASS_P (op)
1223 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1225 TREE_VALUE (link) = op;
1226 changed = true;
1229 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1231 tree link = gimple_asm_input_op (stmt, i);
1232 tree op = TREE_VALUE (link);
1233 constraint
1234 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1235 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1236 oconstraints, &allows_mem, &allows_reg);
1237 if (REFERENCE_CLASS_P (op)
1238 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1239 != NULL_TREE)
1241 TREE_VALUE (link) = op;
1242 changed = true;
1246 break;
1248 case GIMPLE_DEBUG:
1249 if (gimple_debug_bind_p (stmt))
1251 tree val = gimple_debug_bind_get_value (stmt);
1252 if (val
1253 && REFERENCE_CLASS_P (val))
1255 tree tem = maybe_fold_reference (val, false);
1256 if (tem)
1258 gimple_debug_bind_set_value (stmt, tem);
1259 changed = true;
1262 else if (val
1263 && TREE_CODE (val) == ADDR_EXPR)
1265 tree ref = TREE_OPERAND (val, 0);
1266 tree tem = maybe_fold_reference (ref, false);
1267 if (tem)
1269 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1270 gimple_debug_bind_set_value (stmt, tem);
1271 changed = true;
1275 break;
1277 default:;
1280 /* If stmt folds into nothing and it was the last stmt in a bb,
1281 don't call gsi_stmt. */
1282 if (gsi_end_p (*gsi))
1284 gcc_assert (next_stmt == NULL);
1285 return changed;
1288 stmt = gsi_stmt (*gsi);
1290 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1291 as we'd changing the next stmt. */
1292 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1294 tree lhs = gimple_get_lhs (stmt);
1295 if (lhs && REFERENCE_CLASS_P (lhs))
1297 tree new_lhs = maybe_fold_reference (lhs, true);
1298 if (new_lhs)
1300 gimple_set_lhs (stmt, new_lhs);
1301 changed = true;
1306 return changed;
1309 /* Fold the statement pointed to by GSI. In some cases, this function may
1310 replace the whole statement with a new one. Returns true iff folding
1311 makes any changes.
1312 The statement pointed to by GSI should be in valid gimple form but may
1313 be in unfolded state as resulting from for example constant propagation
1314 which can produce *&x = 0. */
1316 bool
1317 fold_stmt (gimple_stmt_iterator *gsi)
1319 return fold_stmt_1 (gsi, false);
1322 /* Perform the minimal folding on statement *GSI. Only operations like
1323 *&x created by constant propagation are handled. The statement cannot
1324 be replaced with a new one. Return true if the statement was
1325 changed, false otherwise.
1326 The statement *GSI should be in valid gimple form but may
1327 be in unfolded state as resulting from for example constant propagation
1328 which can produce *&x = 0. */
1330 bool
1331 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1333 gimple stmt = gsi_stmt (*gsi);
1334 bool changed = fold_stmt_1 (gsi, true);
1335 gcc_assert (gsi_stmt (*gsi) == stmt);
1336 return changed;
1339 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1340 if EXPR is null or we don't know how.
1341 If non-null, the result always has boolean type. */
1343 static tree
1344 canonicalize_bool (tree expr, bool invert)
1346 if (!expr)
1347 return NULL_TREE;
1348 else if (invert)
1350 if (integer_nonzerop (expr))
1351 return boolean_false_node;
1352 else if (integer_zerop (expr))
1353 return boolean_true_node;
1354 else if (TREE_CODE (expr) == SSA_NAME)
1355 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1356 build_int_cst (TREE_TYPE (expr), 0));
1357 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1358 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1359 boolean_type_node,
1360 TREE_OPERAND (expr, 0),
1361 TREE_OPERAND (expr, 1));
1362 else
1363 return NULL_TREE;
1365 else
1367 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1368 return expr;
1369 if (integer_nonzerop (expr))
1370 return boolean_true_node;
1371 else if (integer_zerop (expr))
1372 return boolean_false_node;
1373 else if (TREE_CODE (expr) == SSA_NAME)
1374 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1375 build_int_cst (TREE_TYPE (expr), 0));
1376 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1377 return fold_build2 (TREE_CODE (expr),
1378 boolean_type_node,
1379 TREE_OPERAND (expr, 0),
1380 TREE_OPERAND (expr, 1));
1381 else
1382 return NULL_TREE;
1386 /* Check to see if a boolean expression EXPR is logically equivalent to the
1387 comparison (OP1 CODE OP2). Check for various identities involving
1388 SSA_NAMEs. */
1390 static bool
1391 same_bool_comparison_p (const_tree expr, enum tree_code code,
1392 const_tree op1, const_tree op2)
1394 gimple s;
1396 /* The obvious case. */
1397 if (TREE_CODE (expr) == code
1398 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1399 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1400 return true;
1402 /* Check for comparing (name, name != 0) and the case where expr
1403 is an SSA_NAME with a definition matching the comparison. */
1404 if (TREE_CODE (expr) == SSA_NAME
1405 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1407 if (operand_equal_p (expr, op1, 0))
1408 return ((code == NE_EXPR && integer_zerop (op2))
1409 || (code == EQ_EXPR && integer_nonzerop (op2)));
1410 s = SSA_NAME_DEF_STMT (expr);
1411 if (is_gimple_assign (s)
1412 && gimple_assign_rhs_code (s) == code
1413 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1414 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1415 return true;
1418 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1419 of name is a comparison, recurse. */
1420 if (TREE_CODE (op1) == SSA_NAME
1421 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1423 s = SSA_NAME_DEF_STMT (op1);
1424 if (is_gimple_assign (s)
1425 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1427 enum tree_code c = gimple_assign_rhs_code (s);
1428 if ((c == NE_EXPR && integer_zerop (op2))
1429 || (c == EQ_EXPR && integer_nonzerop (op2)))
1430 return same_bool_comparison_p (expr, c,
1431 gimple_assign_rhs1 (s),
1432 gimple_assign_rhs2 (s));
1433 if ((c == EQ_EXPR && integer_zerop (op2))
1434 || (c == NE_EXPR && integer_nonzerop (op2)))
1435 return same_bool_comparison_p (expr,
1436 invert_tree_comparison (c, false),
1437 gimple_assign_rhs1 (s),
1438 gimple_assign_rhs2 (s));
1441 return false;
1444 /* Check to see if two boolean expressions OP1 and OP2 are logically
1445 equivalent. */
1447 static bool
1448 same_bool_result_p (const_tree op1, const_tree op2)
1450 /* Simple cases first. */
1451 if (operand_equal_p (op1, op2, 0))
1452 return true;
1454 /* Check the cases where at least one of the operands is a comparison.
1455 These are a bit smarter than operand_equal_p in that they apply some
1456 identifies on SSA_NAMEs. */
1457 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1458 && same_bool_comparison_p (op1, TREE_CODE (op2),
1459 TREE_OPERAND (op2, 0),
1460 TREE_OPERAND (op2, 1)))
1461 return true;
1462 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1463 && same_bool_comparison_p (op2, TREE_CODE (op1),
1464 TREE_OPERAND (op1, 0),
1465 TREE_OPERAND (op1, 1)))
1466 return true;
1468 /* Default case. */
1469 return false;
1472 /* Forward declarations for some mutually recursive functions. */
1474 static tree
1475 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1476 enum tree_code code2, tree op2a, tree op2b);
1477 static tree
1478 and_var_with_comparison (tree var, bool invert,
1479 enum tree_code code2, tree op2a, tree op2b);
1480 static tree
1481 and_var_with_comparison_1 (gimple stmt,
1482 enum tree_code code2, tree op2a, tree op2b);
1483 static tree
1484 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1485 enum tree_code code2, tree op2a, tree op2b);
1486 static tree
1487 or_var_with_comparison (tree var, bool invert,
1488 enum tree_code code2, tree op2a, tree op2b);
1489 static tree
1490 or_var_with_comparison_1 (gimple stmt,
1491 enum tree_code code2, tree op2a, tree op2b);
1493 /* Helper function for and_comparisons_1: try to simplify the AND of the
1494 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1495 If INVERT is true, invert the value of the VAR before doing the AND.
1496 Return NULL_EXPR if we can't simplify this to a single expression. */
1498 static tree
1499 and_var_with_comparison (tree var, bool invert,
1500 enum tree_code code2, tree op2a, tree op2b)
1502 tree t;
1503 gimple stmt = SSA_NAME_DEF_STMT (var);
1505 /* We can only deal with variables whose definitions are assignments. */
1506 if (!is_gimple_assign (stmt))
1507 return NULL_TREE;
1509 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1510 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1511 Then we only have to consider the simpler non-inverted cases. */
1512 if (invert)
1513 t = or_var_with_comparison_1 (stmt,
1514 invert_tree_comparison (code2, false),
1515 op2a, op2b);
1516 else
1517 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1518 return canonicalize_bool (t, invert);
1521 /* Try to simplify the AND of the ssa variable defined by the assignment
1522 STMT with the comparison specified by (OP2A CODE2 OP2B).
1523 Return NULL_EXPR if we can't simplify this to a single expression. */
1525 static tree
1526 and_var_with_comparison_1 (gimple stmt,
1527 enum tree_code code2, tree op2a, tree op2b)
1529 tree var = gimple_assign_lhs (stmt);
1530 tree true_test_var = NULL_TREE;
1531 tree false_test_var = NULL_TREE;
1532 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1534 /* Check for identities like (var AND (var == 0)) => false. */
1535 if (TREE_CODE (op2a) == SSA_NAME
1536 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1538 if ((code2 == NE_EXPR && integer_zerop (op2b))
1539 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1541 true_test_var = op2a;
1542 if (var == true_test_var)
1543 return var;
1545 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1546 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1548 false_test_var = op2a;
1549 if (var == false_test_var)
1550 return boolean_false_node;
1554 /* If the definition is a comparison, recurse on it. */
1555 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1557 tree t = and_comparisons_1 (innercode,
1558 gimple_assign_rhs1 (stmt),
1559 gimple_assign_rhs2 (stmt),
1560 code2,
1561 op2a,
1562 op2b);
1563 if (t)
1564 return t;
1567 /* If the definition is an AND or OR expression, we may be able to
1568 simplify by reassociating. */
1569 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1570 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1572 tree inner1 = gimple_assign_rhs1 (stmt);
1573 tree inner2 = gimple_assign_rhs2 (stmt);
1574 gimple s;
1575 tree t;
1576 tree partial = NULL_TREE;
1577 bool is_and = (innercode == BIT_AND_EXPR);
1579 /* Check for boolean identities that don't require recursive examination
1580 of inner1/inner2:
1581 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1582 inner1 AND (inner1 OR inner2) => inner1
1583 !inner1 AND (inner1 AND inner2) => false
1584 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1585 Likewise for similar cases involving inner2. */
1586 if (inner1 == true_test_var)
1587 return (is_and ? var : inner1);
1588 else if (inner2 == true_test_var)
1589 return (is_and ? var : inner2);
1590 else if (inner1 == false_test_var)
1591 return (is_and
1592 ? boolean_false_node
1593 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1594 else if (inner2 == false_test_var)
1595 return (is_and
1596 ? boolean_false_node
1597 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1599 /* Next, redistribute/reassociate the AND across the inner tests.
1600 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1601 if (TREE_CODE (inner1) == SSA_NAME
1602 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1603 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1604 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1605 gimple_assign_rhs1 (s),
1606 gimple_assign_rhs2 (s),
1607 code2, op2a, op2b)))
1609 /* Handle the AND case, where we are reassociating:
1610 (inner1 AND inner2) AND (op2a code2 op2b)
1611 => (t AND inner2)
1612 If the partial result t is a constant, we win. Otherwise
1613 continue on to try reassociating with the other inner test. */
1614 if (is_and)
1616 if (integer_onep (t))
1617 return inner2;
1618 else if (integer_zerop (t))
1619 return boolean_false_node;
1622 /* Handle the OR case, where we are redistributing:
1623 (inner1 OR inner2) AND (op2a code2 op2b)
1624 => (t OR (inner2 AND (op2a code2 op2b))) */
1625 else if (integer_onep (t))
1626 return boolean_true_node;
1628 /* Save partial result for later. */
1629 partial = t;
1632 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1633 if (TREE_CODE (inner2) == SSA_NAME
1634 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1635 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1636 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1637 gimple_assign_rhs1 (s),
1638 gimple_assign_rhs2 (s),
1639 code2, op2a, op2b)))
1641 /* Handle the AND case, where we are reassociating:
1642 (inner1 AND inner2) AND (op2a code2 op2b)
1643 => (inner1 AND t) */
1644 if (is_and)
1646 if (integer_onep (t))
1647 return inner1;
1648 else if (integer_zerop (t))
1649 return boolean_false_node;
1650 /* If both are the same, we can apply the identity
1651 (x AND x) == x. */
1652 else if (partial && same_bool_result_p (t, partial))
1653 return t;
1656 /* Handle the OR case. where we are redistributing:
1657 (inner1 OR inner2) AND (op2a code2 op2b)
1658 => (t OR (inner1 AND (op2a code2 op2b)))
1659 => (t OR partial) */
1660 else
1662 if (integer_onep (t))
1663 return boolean_true_node;
1664 else if (partial)
1666 /* We already got a simplification for the other
1667 operand to the redistributed OR expression. The
1668 interesting case is when at least one is false.
1669 Or, if both are the same, we can apply the identity
1670 (x OR x) == x. */
1671 if (integer_zerop (partial))
1672 return t;
1673 else if (integer_zerop (t))
1674 return partial;
1675 else if (same_bool_result_p (t, partial))
1676 return t;
1681 return NULL_TREE;
1684 /* Try to simplify the AND of two comparisons defined by
1685 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1686 If this can be done without constructing an intermediate value,
1687 return the resulting tree; otherwise NULL_TREE is returned.
1688 This function is deliberately asymmetric as it recurses on SSA_DEFs
1689 in the first comparison but not the second. */
1691 static tree
1692 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1693 enum tree_code code2, tree op2a, tree op2b)
1695 tree truth_type = truth_type_for (TREE_TYPE (op1a));
1697 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1698 if (operand_equal_p (op1a, op2a, 0)
1699 && operand_equal_p (op1b, op2b, 0))
1701 /* Result will be either NULL_TREE, or a combined comparison. */
1702 tree t = combine_comparisons (UNKNOWN_LOCATION,
1703 TRUTH_ANDIF_EXPR, code1, code2,
1704 truth_type, op1a, op1b);
1705 if (t)
1706 return t;
1709 /* Likewise the swapped case of the above. */
1710 if (operand_equal_p (op1a, op2b, 0)
1711 && operand_equal_p (op1b, op2a, 0))
1713 /* Result will be either NULL_TREE, or a combined comparison. */
1714 tree t = combine_comparisons (UNKNOWN_LOCATION,
1715 TRUTH_ANDIF_EXPR, code1,
1716 swap_tree_comparison (code2),
1717 truth_type, op1a, op1b);
1718 if (t)
1719 return t;
1722 /* If both comparisons are of the same value against constants, we might
1723 be able to merge them. */
1724 if (operand_equal_p (op1a, op2a, 0)
1725 && TREE_CODE (op1b) == INTEGER_CST
1726 && TREE_CODE (op2b) == INTEGER_CST)
1728 int cmp = tree_int_cst_compare (op1b, op2b);
1730 /* If we have (op1a == op1b), we should either be able to
1731 return that or FALSE, depending on whether the constant op1b
1732 also satisfies the other comparison against op2b. */
1733 if (code1 == EQ_EXPR)
1735 bool done = true;
1736 bool val;
1737 switch (code2)
1739 case EQ_EXPR: val = (cmp == 0); break;
1740 case NE_EXPR: val = (cmp != 0); break;
1741 case LT_EXPR: val = (cmp < 0); break;
1742 case GT_EXPR: val = (cmp > 0); break;
1743 case LE_EXPR: val = (cmp <= 0); break;
1744 case GE_EXPR: val = (cmp >= 0); break;
1745 default: done = false;
1747 if (done)
1749 if (val)
1750 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1751 else
1752 return boolean_false_node;
1755 /* Likewise if the second comparison is an == comparison. */
1756 else if (code2 == EQ_EXPR)
1758 bool done = true;
1759 bool val;
1760 switch (code1)
1762 case EQ_EXPR: val = (cmp == 0); break;
1763 case NE_EXPR: val = (cmp != 0); break;
1764 case LT_EXPR: val = (cmp > 0); break;
1765 case GT_EXPR: val = (cmp < 0); break;
1766 case LE_EXPR: val = (cmp >= 0); break;
1767 case GE_EXPR: val = (cmp <= 0); break;
1768 default: done = false;
1770 if (done)
1772 if (val)
1773 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1774 else
1775 return boolean_false_node;
1779 /* Same business with inequality tests. */
1780 else if (code1 == NE_EXPR)
1782 bool val;
1783 switch (code2)
1785 case EQ_EXPR: val = (cmp != 0); break;
1786 case NE_EXPR: val = (cmp == 0); break;
1787 case LT_EXPR: val = (cmp >= 0); break;
1788 case GT_EXPR: val = (cmp <= 0); break;
1789 case LE_EXPR: val = (cmp > 0); break;
1790 case GE_EXPR: val = (cmp < 0); break;
1791 default:
1792 val = false;
1794 if (val)
1795 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1797 else if (code2 == NE_EXPR)
1799 bool val;
1800 switch (code1)
1802 case EQ_EXPR: val = (cmp == 0); break;
1803 case NE_EXPR: val = (cmp != 0); break;
1804 case LT_EXPR: val = (cmp <= 0); break;
1805 case GT_EXPR: val = (cmp >= 0); break;
1806 case LE_EXPR: val = (cmp < 0); break;
1807 case GE_EXPR: val = (cmp > 0); break;
1808 default:
1809 val = false;
1811 if (val)
1812 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1815 /* Chose the more restrictive of two < or <= comparisons. */
1816 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1817 && (code2 == LT_EXPR || code2 == LE_EXPR))
1819 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1820 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1821 else
1822 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1825 /* Likewise chose the more restrictive of two > or >= comparisons. */
1826 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1827 && (code2 == GT_EXPR || code2 == GE_EXPR))
1829 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1830 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1831 else
1832 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1835 /* Check for singleton ranges. */
1836 else if (cmp == 0
1837 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1838 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1839 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1841 /* Check for disjoint ranges. */
1842 else if (cmp <= 0
1843 && (code1 == LT_EXPR || code1 == LE_EXPR)
1844 && (code2 == GT_EXPR || code2 == GE_EXPR))
1845 return boolean_false_node;
1846 else if (cmp >= 0
1847 && (code1 == GT_EXPR || code1 == GE_EXPR)
1848 && (code2 == LT_EXPR || code2 == LE_EXPR))
1849 return boolean_false_node;
1852 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1853 NAME's definition is a truth value. See if there are any simplifications
1854 that can be done against the NAME's definition. */
1855 if (TREE_CODE (op1a) == SSA_NAME
1856 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1857 && (integer_zerop (op1b) || integer_onep (op1b)))
1859 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1860 || (code1 == NE_EXPR && integer_onep (op1b)));
1861 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1862 switch (gimple_code (stmt))
1864 case GIMPLE_ASSIGN:
1865 /* Try to simplify by copy-propagating the definition. */
1866 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1868 case GIMPLE_PHI:
1869 /* If every argument to the PHI produces the same result when
1870 ANDed with the second comparison, we win.
1871 Do not do this unless the type is bool since we need a bool
1872 result here anyway. */
1873 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1875 tree result = NULL_TREE;
1876 unsigned i;
1877 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1879 tree arg = gimple_phi_arg_def (stmt, i);
1881 /* If this PHI has itself as an argument, ignore it.
1882 If all the other args produce the same result,
1883 we're still OK. */
1884 if (arg == gimple_phi_result (stmt))
1885 continue;
1886 else if (TREE_CODE (arg) == INTEGER_CST)
1888 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1890 if (!result)
1891 result = boolean_false_node;
1892 else if (!integer_zerop (result))
1893 return NULL_TREE;
1895 else if (!result)
1896 result = fold_build2 (code2, boolean_type_node,
1897 op2a, op2b);
1898 else if (!same_bool_comparison_p (result,
1899 code2, op2a, op2b))
1900 return NULL_TREE;
1902 else if (TREE_CODE (arg) == SSA_NAME
1903 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1905 tree temp;
1906 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1907 /* In simple cases we can look through PHI nodes,
1908 but we have to be careful with loops.
1909 See PR49073. */
1910 if (! dom_info_available_p (CDI_DOMINATORS)
1911 || gimple_bb (def_stmt) == gimple_bb (stmt)
1912 || dominated_by_p (CDI_DOMINATORS,
1913 gimple_bb (def_stmt),
1914 gimple_bb (stmt)))
1915 return NULL_TREE;
1916 temp = and_var_with_comparison (arg, invert, code2,
1917 op2a, op2b);
1918 if (!temp)
1919 return NULL_TREE;
1920 else if (!result)
1921 result = temp;
1922 else if (!same_bool_result_p (result, temp))
1923 return NULL_TREE;
1925 else
1926 return NULL_TREE;
1928 return result;
1931 default:
1932 break;
1935 return NULL_TREE;
1938 /* Try to simplify the AND of two comparisons, specified by
1939 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1940 If this can be simplified to a single expression (without requiring
1941 introducing more SSA variables to hold intermediate values),
1942 return the resulting tree. Otherwise return NULL_TREE.
1943 If the result expression is non-null, it has boolean type. */
1945 tree
1946 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1947 enum tree_code code2, tree op2a, tree op2b)
1949 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1950 if (t)
1951 return t;
1952 else
1953 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1956 /* Helper function for or_comparisons_1: try to simplify the OR of the
1957 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1958 If INVERT is true, invert the value of VAR before doing the OR.
1959 Return NULL_EXPR if we can't simplify this to a single expression. */
1961 static tree
1962 or_var_with_comparison (tree var, bool invert,
1963 enum tree_code code2, tree op2a, tree op2b)
1965 tree t;
1966 gimple stmt = SSA_NAME_DEF_STMT (var);
1968 /* We can only deal with variables whose definitions are assignments. */
1969 if (!is_gimple_assign (stmt))
1970 return NULL_TREE;
1972 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1973 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1974 Then we only have to consider the simpler non-inverted cases. */
1975 if (invert)
1976 t = and_var_with_comparison_1 (stmt,
1977 invert_tree_comparison (code2, false),
1978 op2a, op2b);
1979 else
1980 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1981 return canonicalize_bool (t, invert);
1984 /* Try to simplify the OR of the ssa variable defined by the assignment
1985 STMT with the comparison specified by (OP2A CODE2 OP2B).
1986 Return NULL_EXPR if we can't simplify this to a single expression. */
1988 static tree
1989 or_var_with_comparison_1 (gimple stmt,
1990 enum tree_code code2, tree op2a, tree op2b)
1992 tree var = gimple_assign_lhs (stmt);
1993 tree true_test_var = NULL_TREE;
1994 tree false_test_var = NULL_TREE;
1995 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1997 /* Check for identities like (var OR (var != 0)) => true . */
1998 if (TREE_CODE (op2a) == SSA_NAME
1999 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2001 if ((code2 == NE_EXPR && integer_zerop (op2b))
2002 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2004 true_test_var = op2a;
2005 if (var == true_test_var)
2006 return var;
2008 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2009 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2011 false_test_var = op2a;
2012 if (var == false_test_var)
2013 return boolean_true_node;
2017 /* If the definition is a comparison, recurse on it. */
2018 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2020 tree t = or_comparisons_1 (innercode,
2021 gimple_assign_rhs1 (stmt),
2022 gimple_assign_rhs2 (stmt),
2023 code2,
2024 op2a,
2025 op2b);
2026 if (t)
2027 return t;
2030 /* If the definition is an AND or OR expression, we may be able to
2031 simplify by reassociating. */
2032 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2033 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2035 tree inner1 = gimple_assign_rhs1 (stmt);
2036 tree inner2 = gimple_assign_rhs2 (stmt);
2037 gimple s;
2038 tree t;
2039 tree partial = NULL_TREE;
2040 bool is_or = (innercode == BIT_IOR_EXPR);
2042 /* Check for boolean identities that don't require recursive examination
2043 of inner1/inner2:
2044 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2045 inner1 OR (inner1 AND inner2) => inner1
2046 !inner1 OR (inner1 OR inner2) => true
2047 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2049 if (inner1 == true_test_var)
2050 return (is_or ? var : inner1);
2051 else if (inner2 == true_test_var)
2052 return (is_or ? var : inner2);
2053 else if (inner1 == false_test_var)
2054 return (is_or
2055 ? boolean_true_node
2056 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2057 else if (inner2 == false_test_var)
2058 return (is_or
2059 ? boolean_true_node
2060 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2062 /* Next, redistribute/reassociate the OR across the inner tests.
2063 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2064 if (TREE_CODE (inner1) == SSA_NAME
2065 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2066 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2067 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2068 gimple_assign_rhs1 (s),
2069 gimple_assign_rhs2 (s),
2070 code2, op2a, op2b)))
2072 /* Handle the OR case, where we are reassociating:
2073 (inner1 OR inner2) OR (op2a code2 op2b)
2074 => (t OR inner2)
2075 If the partial result t is a constant, we win. Otherwise
2076 continue on to try reassociating with the other inner test. */
2077 if (is_or)
2079 if (integer_onep (t))
2080 return boolean_true_node;
2081 else if (integer_zerop (t))
2082 return inner2;
2085 /* Handle the AND case, where we are redistributing:
2086 (inner1 AND inner2) OR (op2a code2 op2b)
2087 => (t AND (inner2 OR (op2a code op2b))) */
2088 else if (integer_zerop (t))
2089 return boolean_false_node;
2091 /* Save partial result for later. */
2092 partial = t;
2095 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2096 if (TREE_CODE (inner2) == SSA_NAME
2097 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2098 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2099 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2100 gimple_assign_rhs1 (s),
2101 gimple_assign_rhs2 (s),
2102 code2, op2a, op2b)))
2104 /* Handle the OR case, where we are reassociating:
2105 (inner1 OR inner2) OR (op2a code2 op2b)
2106 => (inner1 OR t)
2107 => (t OR partial) */
2108 if (is_or)
2110 if (integer_zerop (t))
2111 return inner1;
2112 else if (integer_onep (t))
2113 return boolean_true_node;
2114 /* If both are the same, we can apply the identity
2115 (x OR x) == x. */
2116 else if (partial && same_bool_result_p (t, partial))
2117 return t;
2120 /* Handle the AND case, where we are redistributing:
2121 (inner1 AND inner2) OR (op2a code2 op2b)
2122 => (t AND (inner1 OR (op2a code2 op2b)))
2123 => (t AND partial) */
2124 else
2126 if (integer_zerop (t))
2127 return boolean_false_node;
2128 else if (partial)
2130 /* We already got a simplification for the other
2131 operand to the redistributed AND expression. The
2132 interesting case is when at least one is true.
2133 Or, if both are the same, we can apply the identity
2134 (x AND x) == x. */
2135 if (integer_onep (partial))
2136 return t;
2137 else if (integer_onep (t))
2138 return partial;
2139 else if (same_bool_result_p (t, partial))
2140 return t;
2145 return NULL_TREE;
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP is <0, 0, or >0 as op1b is less than, equal to, or greater
	 than op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  /* DONE records whether CODE2 was a comparison we know how to
	     evaluate against the constant pair; VAL is its outcome.  */
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}
      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  /* The equality test implies the other comparison, so the OR
	     reduces to the second comparison alone.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise chose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges: (x < C) | (x > C) ==> x != C.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for falsehood
	 ((NAME == 0) or (NAME != 1)).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
2402 /* Try to simplify the OR of two comparisons, specified by
2403 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2404 If this can be simplified to a single expression (without requiring
2405 introducing more SSA variables to hold intermediate values),
2406 return the resulting tree. Otherwise return NULL_TREE.
2407 If the result expression is non-null, it has boolean type. */
2409 tree
2410 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2411 enum tree_code code2, tree op2a, tree op2b)
2413 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2414 if (t)
2415 return t;
2416 else
2417 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	/* Dispatch on the shape of the RHS: single operand, unary,
	   binary or ternary operation.  */
	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* A vector CONSTRUCTOR with one value per lane folds to a
		 VECTOR_CST when every valueized element is a constant.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      /* Substitute a known invariant address into the
			 MEM_REF before folding the aggregate read.  */
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for restrict qualification
		 of pointer types should not apply here.
		 Substitution later will only substitute to allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
		     == TYPE_ADDR_SPACE (TREE_TYPE (op0))
		  && TYPE_MODE (TREE_TYPE (lhs))
		     == TYPE_MODE (TREE_TYPE (op0)))
		return op0;

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;

	if (gimple_call_internal_p (stmt))
	  /* No folding yet for these functions.  */
	  return NULL_TREE;

	/* Only calls to built-in functions are folded here, by
	   rebuilding a CALL_EXPR over the valueized arguments.  */
	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
2651 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2652 Returns NULL_TREE if folding to a constant is not possible, otherwise
2653 returns a constant according to is_gimple_min_invariant. */
2655 tree
2656 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2658 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2659 if (res && is_gimple_min_invariant (res))
2660 return res;
2661 return NULL_TREE;
2665 /* The following set of functions are supposed to fold references using
2666 their constant initializers. */
2668 static tree fold_ctor_reference (tree type, tree ctor,
2669 unsigned HOST_WIDE_INT offset,
2670 unsigned HOST_WIDE_INT size, tree);
/* See if we can find constructor defining value of BASE.
   When we know the consructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant offset into *BIT_OFFSET and strip
	 the indirection, valueizing the address if possible.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!host_integerp (TREE_OPERAND (base, 1), 0))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).low
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
      if (!const_value_known_p (base))
	return NULL_TREE;

      /* Fallthru.  */
    case CONST_DECL:
      /* A static/external decl with no initializer is known to be
	 zero-initialized; signal that with error_mark_node.  */
      if (!DECL_INITIAL (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
	return error_mark_node;
      /* Do not return an error_mark_node DECL_INITIAL.  LTO uses this
	 as special marker (_not_ zero ...) for its own purposes.  */
      if (DECL_INITIAL (base) == error_mark_node)
	return NULL_TREE;
      return DECL_INITIAL (base);

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Peel one level of reference; only constant, exactly-known
	 extents can be accumulated into *BIT_OFFSET.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
2741 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2742 to the memory at bit OFFSET.
2744 We do only simple job of folding byte accesses. */
2746 static tree
2747 fold_string_cst_ctor_reference (tree type, tree ctor,
2748 unsigned HOST_WIDE_INT offset,
2749 unsigned HOST_WIDE_INT size)
2751 if (INTEGRAL_TYPE_P (type)
2752 && (TYPE_MODE (type)
2753 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2754 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2755 == MODE_INT)
2756 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2757 && size == BITS_PER_UNIT
2758 && !(offset % BITS_PER_UNIT))
2760 offset /= BITS_PER_UNIT;
2761 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2762 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2763 [offset]));
2764 /* Folding
2765 const char a[20]="hello";
2766 return a[10];
2768 might lead to offset greater than string length. In this case we
2769 know value is either initialized to 0 or out of bounds. Return 0
2770 in both cases. */
2771 return build_zero_cst (type);
2773 return NULL_TREE;
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects makes no sense.  */
  gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
		 .udiv (elt_size, TRUNC_DIV_EXPR);
  access_index += low_bound;
  if (index_type)
    access_index = access_index.ext (TYPE_PRECISION (index_type),
				     TYPE_UNSIGNED (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* Walk the constructor elements tracking the running index; start one
     below the low bound so the first implicit index lands on it.  */
  index = low_bound - double_int_one;
  if (index_type)
    index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitely set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = tree_to_double_int (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
	      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += double_int_one;
	  if (index_type)
	    index = index.ext (TYPE_PRECISION (index_type),
			       TYPE_UNSIGNED (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (access_index.cmp (index, 1) >= 0
	  && access_index.cmp (max_index, 1) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitely mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
      double_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = tree_to_double_int (field_offset)
		  + byte_offset_cst * bits_per_unit_cst;
      /* Compute bit offset where the field ends.  A flexible array
	 member has no known end, represented as zero below.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + tree_to_double_int (field_size);
      else
	bitoffset_end = double_int_zero;

      access_end = double_int::from_uhwi (offset)
		   + double_int::from_uhwi (size);

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (access_end.cmp (bitoffset, 0) > 0
	  && (field_size == NULL_TREE
	      || double_int::from_uhwi (offset).slt (bitoffset_end)))
	{
	  double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (access_end.cmp (bitoffset_end, 0) > 0)
	    return NULL_TREE;
	  if (double_int::from_uhwi (offset).slt (bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitely mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
2941 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2942 to the memory at bit OFFSET. */
2944 static tree
2945 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2946 unsigned HOST_WIDE_INT size, tree from_decl)
2948 tree ret;
2950 /* We found the field with exact match. */
2951 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2952 && !offset)
2953 return canonicalize_constructor_val (ctor, from_decl);
2955 /* We are at the end of walk, see if we can view convert the
2956 result. */
2957 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2958 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2959 && operand_equal_p (TYPE_SIZE (type),
2960 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2962 ret = canonicalize_constructor_val (ctor, from_decl);
2963 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2964 if (ret)
2965 STRIP_NOPS (ret);
2966 return ret;
2968 if (TREE_CODE (ctor) == STRING_CST)
2969 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2970 if (TREE_CODE (ctor) == CONSTRUCTOR)
2973 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2974 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2975 return fold_array_ctor_reference (type, ctor, offset, size,
2976 from_decl);
2977 else
2978 return fold_nonarray_ctor_reference (type, ctor, offset, size,
2979 from_decl);
2982 return NULL_TREE;
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  /* Volatile accesses must be preserved, not folded away.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;
	  double_int doffset;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  host_integerp (unit_size, 1))
	      && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
			    .sext (TYPE_PRECISION (TREE_TYPE (idx))),
		  doffset.fits_shwi ()))
	    {
	      /* Convert the element index into a bit offset.  */
	      offset = doffset.to_shwi ();
	      offset *= TREE_INT_CST_LOW (unit_size);
	      offset *= BITS_PER_UNIT;

	      base = TREE_OPERAND (t, 0);
	      ctor = get_base_constructor (base, &offset, valueize);
	      /* Empty constructor.  Always fold to 0.  */
	      if (ctor == error_mark_node)
		return build_zero_cst (TREE_TYPE (t));
	      /* Out of bound array access.  Value is undefined,
		 but don't fold.  */
	      if (offset < 0)
		return NULL_TREE;
	      /* We can not determine ctor.  */
	      if (!ctor)
		return NULL_TREE;
	      return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					  TREE_INT_CST_LOW (unit_size)
					  * BITS_PER_UNIT,
					  base);
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the operand first; extract the part only from a
	   COMPLEX_CST.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
3097 tree
3098 fold_const_aggregate_ref (tree t)
3100 return fold_const_aggregate_ref_1 (t, NULL);
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  unsigned HOST_WIDE_INT offset, size;
  tree v, fn, vtable;

  vtable = v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* The vtable reference may be &vtable + CST; peel the offset off.  */
  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
      v = TREE_OPERAND (v, 0);
    }
  else
    offset = 0;

  if (TREE_CODE (v) != ADDR_EXPR)
    return NULL_TREE;
  v = TREE_OPERAND (v, 0);

  /* Only a virtual table VAR_DECL with a usable initializer can be
     indexed.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v)
      || !DECL_INITIAL (v)
      || DECL_INITIAL (v) == error_mark_node)
    return NULL_TREE;
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
  /* TOKEN selects the slot; each slot is SIZE bits wide.  */
  offset += token * size;
  fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
			    offset, size, vtable);
  if (!fn || integer_zerop (fn))
    return NULL_TREE;
  gcc_assert (TREE_CODE (fn) == ADDR_EXPR
	      || TREE_CODE (fn) == FDESC_EXPR);
  fn = TREE_OPERAND (fn, 0);
  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);

  /* When cgraph node is missing and function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (fn, vtable))
    return NULL_TREE;

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  /* Caller contract: VAL must be a scalar floating-point value.  */
  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  /* Past this point only SSA names can be analyzed further.  */
  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	  /* Fall through: a product of distinct operands tells us
	     nothing about the sign.  */

	default:
	  return false;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    /* These math builtins have a non-negative range by
	       definition (absolute values, distances, exponentials,
	       and functions such as acos/cosh whose range is
	       non-negative).  */
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
		return true;

	      /* With signed zeros honored, sqrt may yield -0.0,
		 so we cannot claim non-negativity.  */
	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      /* Round-trip the truncated integer back to a real
			 to verify the exponent was exactly integral
			 (e.g. rejects 2.5, accepts 2.0).  */
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      return false;
	    }
	}
    }

  /* Nothing proved the value non-negative; answer conservatively.  */
  return false;
}