2012-03-17 Janne Blomqvist <jb@gcc.gnu.org>
[official-gcc.git] / gcc / gimple-fold.c
bloba1eba65e042f0ee2091bc0b55d0a8fbecc9a3e2a
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
33 #include "gimple-fold.h"
35 /* Return true when DECL can be referenced from current unit.
36 We can get declarations that are not possible to reference for
37 various reasons:
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
44 set.
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsehwere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
49 declaring the body.
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final copmilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
54 directly. */
56 static bool
57 can_refer_decl_in_current_unit_p (tree decl)
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
62 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
63 return true;
64 /* External flag is set, so we deal with C++ reference
65 to static object from other file. */
66 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
67 && TREE_CODE (decl) == VAR_DECL)
69 /* Just be sure it is not big in frontend setting
70 flags incorrectly. Those variables should never
71 be finalized. */
72 gcc_checking_assert (!(vnode = varpool_get_node (decl))
73 || !vnode->finalized);
74 return false;
76 /* When function is public, we always can introduce new reference.
77 Exception are the COMDAT functions where introducing a direct
78 reference imply need to include function body in the curren tunit. */
79 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
80 return true;
81 /* We are not at ltrans stage; so don't worry about WHOPR.
82 Also when still gimplifying all referred comdat functions will be
83 produced.
84 ??? as observed in PR20991 for already optimized out comdat virtual functions
85 we may not neccesarily give up because the copy will be output elsewhere when
86 corresponding vtable is output. */
87 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
88 return true;
89 /* If we already output the function body, we are safe. */
90 if (TREE_ASM_WRITTEN (decl))
91 return true;
92 if (TREE_CODE (decl) == FUNCTION_DECL)
94 node = cgraph_get_node (decl);
95 /* Check that we still have function body and that we didn't took
96 the decision to eliminate offline copy of the function yet.
97 The second is important when devirtualization happens during final
98 compilation stage when making a new reference no longer makes callee
99 to be compiled. */
100 if (!node || !node->analyzed || node->global.inlined_to)
101 return false;
103 else if (TREE_CODE (decl) == VAR_DECL)
105 vnode = varpool_get_node (decl);
106 if (!vnode || !vnode->finalized)
107 return false;
109 return true;
112 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
113 acceptable form for is_gimple_min_invariant. */
115 tree
116 canonicalize_constructor_val (tree cval)
118 STRIP_NOPS (cval);
119 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
120 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
122 tree ptr = TREE_OPERAND (cval, 0);
123 if (is_gimple_min_invariant (ptr))
124 cval = build1_loc (EXPR_LOCATION (cval),
125 ADDR_EXPR, TREE_TYPE (ptr),
126 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
127 ptr,
128 fold_convert (ptr_type_node,
129 TREE_OPERAND (cval, 1))));
131 if (TREE_CODE (cval) == ADDR_EXPR)
133 tree base = get_base_address (TREE_OPERAND (cval, 0));
134 if (!base)
135 return NULL_TREE;
137 if ((TREE_CODE (base) == VAR_DECL
138 || TREE_CODE (base) == FUNCTION_DECL)
139 && !can_refer_decl_in_current_unit_p (base))
140 return NULL_TREE;
141 if (TREE_CODE (base) == VAR_DECL)
143 TREE_ADDRESSABLE (base) = 1;
144 if (cfun && gimple_referenced_vars (cfun))
145 add_referenced_var (base);
147 else if (TREE_CODE (base) == FUNCTION_DECL)
149 /* Make sure we create a cgraph node for functions we'll reference.
150 They can be non-existent if the reference comes from an entry
151 of an external vtable for example. */
152 cgraph_get_create_node (base);
154 /* Fixup types in global initializers. */
155 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
156 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
158 return cval;
161 /* If SYM is a constant variable with known value, return the value.
162 NULL_TREE is returned otherwise. */
164 tree
165 get_symbol_constant_value (tree sym)
167 if (const_value_known_p (sym))
169 tree val = DECL_INITIAL (sym);
170 if (val)
172 val = canonicalize_constructor_val (val);
173 if (val && is_gimple_min_invariant (val))
174 return val;
175 else
176 return NULL_TREE;
178 /* Variables declared 'const' without an initializer
179 have zero as the initializer if they may not be
180 overridden at link or run time. */
181 if (!val
182 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
183 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
184 return build_zero_cst (TREE_TYPE (sym));
187 return NULL_TREE;
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  Returns the folded tree or
   NULL_TREE if nothing changed.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  /* T tracks the innermost non-handled-component part of EXPR so that
     rewrites through *T update EXPR in place.  */
  tree *t = &expr;
  tree result;

  /* Fold wrappers around constants directly.  */
  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  /* Strip down to the base reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
                              TREE_OPERAND (*t, 0),
                              TREE_OPERAND (*t, 1));
      if (tem)
        {
          /* fold_binary does not preserve volatileness; restore it.  */
          TREE_THIS_VOLATILE (tem) = volatile_p;
          *t = tem;
          /* Retry folding on the canonicalized expression.  */
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  /* RHS references may fold to a constant aggregate element.  */
  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
          == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
          == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
         from the rhs to the lhs if is_lhs, but we don't have the
         rhs here to verify that.  Thus require strict type
         compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
                             TREE_TYPE (TREE_OPERAND
                                        (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
        return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
        {
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Memory references are folded through maybe_fold_reference.  */
        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            /* &MEM[p, 0] simplifies to p converted to the address type.  */
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return unshare_expr (get_symbol_constant_value (rhs));

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
         X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
          || gimple_assign_rhs_code (stmt) == NE_EXPR)
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree op1 = gimple_assign_rhs1 (stmt);
          tree op2 = gimple_assign_rhs2 (stmt);
          tree type = TREE_TYPE (op1);

          /* Check whether the comparison operands are of the same boolean
             type as the result type is.
             Check that second operand is an integer-constant with value
             one or zero.  */
          if (TREE_CODE (op2) == INTEGER_CST
              && (integer_zerop (op2) || integer_onep (op2))
              && useless_type_conversion_p (TREE_TYPE (lhs), type))
            {
              enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
              bool is_logical_not = false;

              /* X == 0 and X != 1 is a logical-not of X,
                 X == 1 and X != 0 is X.  */
              if ((cmp_code == EQ_EXPR && integer_zerop (op2))
                  || (cmp_code == NE_EXPR && integer_onep (op2)))
                is_logical_not = true;

              if (is_logical_not == false)
                result = op1;
              /* Only for one-bit precision typed X the transformation
                 !X -> ~X is valid.  */
              else if (TYPE_PRECISION (type) == 1)
                result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
                                     type, op1);
              /* Otherwise we use !X -> X ^ 1.  */
              else
                result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
                                     type, op1, build_int_cst (type, 1));
            }
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
        {
          tree op0 = gimple_assign_rhs1 (stmt);
          tree tem;
          bool set = false;
          location_t cond_loc = gimple_location (stmt);

          if (COMPARISON_CLASS_P (op0))
            {
              /* Defer overflow warnings until we know the fold result
                 is actually usable as a GIMPLE rhs.  */
              fold_defer_overflow_warnings ();
              tem = fold_binary_loc (cond_loc,
                                     TREE_CODE (op0), TREE_TYPE (op0),
                                     TREE_OPERAND (op0, 0),
                                     TREE_OPERAND (op0, 1));
              /* This is actually a conditional expression, not a GIMPLE
                 conditional statement, however, the valid_gimple_rhs_p
                 test still applies.  */
              set = (tem && is_gimple_condexpr (tem)
                     && valid_gimple_rhs_p (tem));
              fold_undefer_overflow_warnings (set, stmt, 0);
            }
          else if (is_gimple_min_invariant (op0))
            {
              tem = op0;
              set = true;
            }
          else
            return NULL_TREE;

          if (set)
            result = fold_build3_loc (cond_loc, COND_EXPR,
                                      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
                                      gimple_assign_rhs2 (stmt),
                                      gimple_assign_rhs3 (stmt));
        }

      if (!result)
        result = fold_ternary_loc (loc, subcode,
                                   TREE_TYPE (gimple_assign_lhs (stmt)),
                                   gimple_assign_rhs1 (stmt),
                                   gimple_assign_rhs2 (stmt),
                                   gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
508 /* Attempt to fold a conditional statement. Return true if any changes were
509 made. We only attempt to fold the condition expression, and do not perform
510 any transformation that would require alteration of the cfg. It is
511 assumed that the operands have been previously folded. */
513 static bool
514 fold_gimple_cond (gimple stmt)
516 tree result = fold_binary_loc (gimple_location (stmt),
517 gimple_cond_code (stmt),
518 boolean_type_node,
519 gimple_cond_lhs (stmt),
520 gimple_cond_rhs (stmt));
522 if (result)
524 STRIP_USELESS_TYPE_CONVERSION (result);
525 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
527 gimple_cond_set_condition_from_tree (stmt, result);
528 return true;
532 return false;
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;
  gimple last;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (&gctx);
  gctx.into_ssa = gimple_in_ssa_p (cfun);

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_remove (si_p, true);
          return;
        }
    }
  else
    {
      /* Gimplify EXPR into a temporary and assign that to the call's
         original LHS, keeping the result available to consumers.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF if it stores to memory or is a call
         with side effects on memory.  */
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          /* The last store in the sequence (seen first in this backward
             walk) reuses the replaced statement's VDEF; earlier stores
             get fresh SSA names.  */
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  last = NULL;
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      /* Do not insert the last stmt in this loop but remember it
         for replacing the original statement.  */
      if (last)
        {
          gsi_insert_before (si_p, last, GSI_NEW_STMT);
          gsi_next (si_p);
        }
      new_stmt = gsi_stmt (i);
      /* The replacement can expose previously unreferenced variables.  */
      if (gimple_in_ssa_p (cfun))
        find_new_referenced_vars (new_stmt);
      /* If the new statement possibly has a VUSE, update it with exact SSA
         name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
      last = new_stmt;
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the last.  */
  gsi_replace (si_p, last, false);
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* A COND_EXPR is handled by requiring both arms to satisfy the
         query.  */
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          /* For maximum value queries ARG itself must be a non-negative
             integer constant.  */
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          /* For maximum queries keep the larger of the two constants.  */
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          /* For exact-length queries every determination must agree.  */
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      return false;
    }
}
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  /* The call's result is ignored when there is no LHS.  */
  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.
     ARG_IDX selects which argument the length/value query applies to,
     TYPE selects the get_maxval_strlen query mode (0 = exact string
     length, 1 = maximum string length, 2 = maximum value).  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (CONVERT_EXPR_P (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (loc, callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (loc, callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2], ignore,
                                           DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
981 /* Generate code adjusting the first parameter of a call statement determined
982 by GSI by DELTA. */
984 void
985 gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
987 gimple call_stmt = gsi_stmt (*gsi);
988 tree parm, tmp;
989 gimple new_stmt;
991 delta = convert_to_ptrofftype (delta);
992 gcc_assert (gimple_call_num_args (call_stmt) >= 1);
993 parm = gimple_call_arg (call_stmt, 0);
994 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
995 tmp = create_tmp_var (TREE_TYPE (parm), NULL);
996 add_referenced_var (tmp);
998 tmp = make_ssa_name (tmp, NULL);
999 new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
1000 SSA_NAME_DEF_STMT (tmp) = new_stmt;
1001 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1002 gimple_call_set_arg (call_stmt, 0, tmp);
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  /* Require a declaration whose full, fixed extent is referenced, so
     the dynamic type is statically known.  */
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      /* Stop once the walked type matches the expected type.  */
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
        break;
      if (offset < 0)
        return NULL_TREE;

      /* Locate the field containing OFFSET (in bits).  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        {
          if (TREE_CODE (fld) != FIELD_DECL)
            continue;

          pos = int_bit_position (fld);
          size = tree_low_cst (DECL_SIZE (fld), 1);
          if (pos <= offset && (pos + size) > offset)
            break;
        }
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
        return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  else
    return binfo;
}
1073 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1074 The statement may be replaced by another statement, e.g., if the call
1075 simplifies to a constant value. Return true if any changes were made.
1076 It is assumed that the operands have been previously folded. */
1078 static bool
1079 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1081 gimple stmt = gsi_stmt (*gsi);
1082 tree callee;
1083 bool changed = false;
1084 unsigned i;
1086 /* Fold *& in call arguments. */
1087 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1088 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1090 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1091 if (tmp)
1093 gimple_call_set_arg (stmt, i, tmp);
1094 changed = true;
1098 /* Check for virtual calls that became direct calls. */
1099 callee = gimple_call_fn (stmt);
1100 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1102 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1104 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1105 changed = true;
1107 else
1109 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1110 tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
1111 if (binfo)
1113 HOST_WIDE_INT token
1114 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1115 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1116 if (fndecl)
1118 gimple_call_set_fndecl (stmt, fndecl);
1119 changed = true;
1125 if (inplace)
1126 return changed;
1128 /* Check for builtins that CCP can handle using information not
1129 available in the generic fold routines. */
1130 callee = gimple_call_fndecl (stmt);
1131 if (callee && DECL_BUILT_IN (callee))
1133 tree result = gimple_fold_builtin (stmt);
1134 if (result)
1136 if (!update_call_from_tree (gsi, result))
1137 gimplify_and_update_call_from_tree (gsi, result);
1138 changed = true;
1142 return changed;
1145 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1146 distinguishes both cases. */
1148 static bool
1149 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1151 bool changed = false;
1152 gimple stmt = gsi_stmt (*gsi);
1153 unsigned i;
1154 gimple_stmt_iterator gsinext = *gsi;
1155 gimple next_stmt;
/* Remember the statement that follows STMT: folding may remove STMT
   entirely, and the LHS pass at the bottom must not accidentally run
   on its successor.  */
1157 gsi_next (&gsinext);
1158 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1160 /* Fold the main computation performed by the statement. */
1161 switch (gimple_code (stmt))
1163 case GIMPLE_ASSIGN:
1165 unsigned old_num_ops = gimple_num_ops (stmt);
1166 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1167 tree lhs = gimple_assign_lhs (stmt);
1168 tree new_rhs;
1169 /* First canonicalize operand order. This avoids building new
1170 trees if this is the only thing fold would later do. */
1171 if ((commutative_tree_code (subcode)
1172 || commutative_ternary_tree_code (subcode))
1173 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1174 gimple_assign_rhs2 (stmt), false))
1176 tree tem = gimple_assign_rhs1 (stmt);
1177 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1178 gimple_assign_set_rhs2 (stmt, tem);
1179 changed = true;
1181 new_rhs = fold_gimple_assign (gsi);
1182 if (new_rhs
1183 && !useless_type_conversion_p (TREE_TYPE (lhs),
1184 TREE_TYPE (new_rhs)))
1185 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
/* In-place folding may only use the new RHS if it is not more
   complex (has no more operands) than the existing one.  */
1186 if (new_rhs
1187 && (!inplace
1188 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1190 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1191 changed = true;
1193 break;
1196 case GIMPLE_COND:
1197 changed |= fold_gimple_cond (stmt);
1198 break;
1200 case GIMPLE_CALL:
1201 changed |= gimple_fold_call (gsi, inplace);
1202 break;
1204 case GIMPLE_ASM:
1205 /* Fold *& in asm operands. */
1207 size_t noutputs;
1208 const char **oconstraints;
1209 const char *constraint;
1210 bool allows_mem, allows_reg;
1212 noutputs = gimple_asm_noutputs (stmt);
1213 oconstraints = XALLOCAVEC (const char *, noutputs);
1215 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1217 tree link = gimple_asm_output_op (stmt, i)
1218 tree op = TREE_VALUE (link);
1219 oconstraints[i]
1220 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1221 if (REFERENCE_CLASS_P (op)
1222 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1224 TREE_VALUE (link) = op;
1225 changed = true;
1228 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1230 tree link = gimple_asm_input_op (stmt, i);
1231 tree op = TREE_VALUE (link);
1232 constraint
1233 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1234 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1235 oconstraints, &allows_mem, &allows_reg);
1236 if (REFERENCE_CLASS_P (op)
1237 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1238 != NULL_TREE)
1240 TREE_VALUE (link) = op;
1241 changed = true;
1245 break;
1247 case GIMPLE_DEBUG:
1248 if (gimple_debug_bind_p (stmt))
1250 tree val = gimple_debug_bind_get_value (stmt);
1251 if (val
1252 && REFERENCE_CLASS_P (val))
1254 tree tem = maybe_fold_reference (val, false);
1255 if (tem)
1257 gimple_debug_bind_set_value (stmt, tem);
1258 changed = true;
1261 else if (val
1262 && TREE_CODE (val) == ADDR_EXPR)
1264 tree ref = TREE_OPERAND (val, 0);
1265 tree tem = maybe_fold_reference (ref, false);
1266 if (tem)
1268 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1269 gimple_debug_bind_set_value (stmt, tem);
1270 changed = true;
1274 break;
1276 default:;
1279 /* If stmt folds into nothing and it was the last stmt in a bb,
1280 don't call gsi_stmt. */
1281 if (gsi_end_p (*gsi))
1283 gcc_assert (next_stmt == NULL);
1284 return changed;
1287 stmt = gsi_stmt (*gsi);
1289 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1290 as we'd be changing the next stmt. */
1291 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1293 tree lhs = gimple_get_lhs (stmt);
1294 if (lhs && REFERENCE_CLASS_P (lhs))
1296 tree new_lhs = maybe_fold_reference (lhs, true);
1297 if (new_lhs)
1299 gimple_set_lhs (stmt, new_lhs);
1300 changed = true;
1305 return changed;
1308 /* Fold the statement pointed to by GSI. In some cases, this function may
1309 replace the whole statement with a new one. Returns true iff folding
1310 makes any changes.
1311 The statement pointed to by GSI should be in valid gimple form but may
1312 be in unfolded state as resulting from for example constant propagation
1313 which can produce *&x = 0. */
1315 bool
1316 fold_stmt (gimple_stmt_iterator *gsi)
1318 return fold_stmt_1 (gsi, false);
1321 /* Perform the minimal folding on statement *GSI. Only operations like
1322 *&x created by constant propagation are handled. The statement cannot
1323 be replaced with a new one. Return true if the statement was
1324 changed, false otherwise.
1325 The statement *GSI should be in valid gimple form but may
1326 be in unfolded state as resulting from for example constant propagation
1327 which can produce *&x = 0. */
1329 bool
1330 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1332 gimple stmt = gsi_stmt (*gsi);
1333 bool changed = fold_stmt_1 (gsi, true);
1334 gcc_assert (gsi_stmt (*gsi) == stmt);
1335 return changed;
1338 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1339 if EXPR is null or we don't know how.
1340 If non-null, the result always has boolean type. */
1342 static tree
1343 canonicalize_bool (tree expr, bool invert)
1345 if (!expr)
1346 return NULL_TREE;
1347 else if (invert)
1349 if (integer_nonzerop (expr))
1350 return boolean_false_node;
1351 else if (integer_zerop (expr))
1352 return boolean_true_node;
1353 else if (TREE_CODE (expr) == SSA_NAME)
1354 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1355 build_int_cst (TREE_TYPE (expr), 0));
1356 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1357 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1358 boolean_type_node,
1359 TREE_OPERAND (expr, 0),
1360 TREE_OPERAND (expr, 1));
1361 else
1362 return NULL_TREE;
1364 else
1366 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1367 return expr;
1368 if (integer_nonzerop (expr))
1369 return boolean_true_node;
1370 else if (integer_zerop (expr))
1371 return boolean_false_node;
1372 else if (TREE_CODE (expr) == SSA_NAME)
1373 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1374 build_int_cst (TREE_TYPE (expr), 0));
1375 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1376 return fold_build2 (TREE_CODE (expr),
1377 boolean_type_node,
1378 TREE_OPERAND (expr, 0),
1379 TREE_OPERAND (expr, 1));
1380 else
1381 return NULL_TREE;
1385 /* Check to see if a boolean expression EXPR is logically equivalent to the
1386 comparison (OP1 CODE OP2). Check for various identities involving
1387 SSA_NAMEs. */
1389 static bool
1390 same_bool_comparison_p (const_tree expr, enum tree_code code,
1391 const_tree op1, const_tree op2)
1393 gimple s;
1395 /* The obvious case. */
1396 if (TREE_CODE (expr) == code
1397 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1398 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1399 return true;
1401 /* Check for comparing (name, name != 0) and the case where expr
1402 is an SSA_NAME with a definition matching the comparison. */
1403 if (TREE_CODE (expr) == SSA_NAME
1404 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1406 if (operand_equal_p (expr, op1, 0))
1407 return ((code == NE_EXPR && integer_zerop (op2))
1408 || (code == EQ_EXPR && integer_nonzerop (op2)));
1409 s = SSA_NAME_DEF_STMT (expr);
1410 if (is_gimple_assign (s)
1411 && gimple_assign_rhs_code (s) == code
1412 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1413 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1414 return true;
1417 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1418 of name is a comparison, recurse. */
1419 if (TREE_CODE (op1) == SSA_NAME
1420 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1422 s = SSA_NAME_DEF_STMT (op1);
1423 if (is_gimple_assign (s)
1424 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1426 enum tree_code c = gimple_assign_rhs_code (s);
1427 if ((c == NE_EXPR && integer_zerop (op2))
1428 || (c == EQ_EXPR && integer_nonzerop (op2)))
1429 return same_bool_comparison_p (expr, c,
1430 gimple_assign_rhs1 (s),
1431 gimple_assign_rhs2 (s));
1432 if ((c == EQ_EXPR && integer_zerop (op2))
1433 || (c == NE_EXPR && integer_nonzerop (op2)))
1434 return same_bool_comparison_p (expr,
1435 invert_tree_comparison (c, false),
1436 gimple_assign_rhs1 (s),
1437 gimple_assign_rhs2 (s));
1440 return false;
1443 /* Check to see if two boolean expressions OP1 and OP2 are logically
1444 equivalent. */
1446 static bool
1447 same_bool_result_p (const_tree op1, const_tree op2)
1449 /* Simple cases first. */
1450 if (operand_equal_p (op1, op2, 0))
1451 return true;
1453 /* Check the cases where at least one of the operands is a comparison.
1454 These are a bit smarter than operand_equal_p in that they apply some
1455 identifies on SSA_NAMEs. */
1456 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1457 && same_bool_comparison_p (op1, TREE_CODE (op2),
1458 TREE_OPERAND (op2, 0),
1459 TREE_OPERAND (op2, 1)))
1460 return true;
1461 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1462 && same_bool_comparison_p (op2, TREE_CODE (op1),
1463 TREE_OPERAND (op1, 0),
1464 TREE_OPERAND (op1, 1)))
1465 return true;
1467 /* Default case. */
1468 return false;
1471 /* Forward declarations for some mutually recursive functions.
   The AND and OR simplifiers below call into each other (a DeMorgan
   rewrite turns an AND problem into an OR problem and vice versa),
   so all six must be declared before any of them is defined.  */
1473 static tree
1474 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1475 enum tree_code code2, tree op2a, tree op2b);
1476 static tree
1477 and_var_with_comparison (tree var, bool invert,
1478 enum tree_code code2, tree op2a, tree op2b);
1479 static tree
1480 and_var_with_comparison_1 (gimple stmt,
1481 enum tree_code code2, tree op2a, tree op2b);
1482 static tree
1483 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1484 enum tree_code code2, tree op2a, tree op2b);
1485 static tree
1486 or_var_with_comparison (tree var, bool invert,
1487 enum tree_code code2, tree op2a, tree op2b);
1488 static tree
1489 or_var_with_comparison_1 (gimple stmt,
1490 enum tree_code code2, tree op2a, tree op2b);
1492 /* Helper function for and_comparisons_1: try to simplify the AND of the
1493 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1494 If INVERT is true, invert the value of the VAR before doing the AND.
1495 Return NULL_EXPR if we can't simplify this to a single expression. */
1497 static tree
1498 and_var_with_comparison (tree var, bool invert,
1499 enum tree_code code2, tree op2a, tree op2b)
1501 tree t;
1502 gimple stmt = SSA_NAME_DEF_STMT (var);
1504 /* We can only deal with variables whose definitions are assignments. */
1505 if (!is_gimple_assign (stmt))
1506 return NULL_TREE;
1508 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1509 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1510 Then we only have to consider the simpler non-inverted cases. */
1511 if (invert)
1512 t = or_var_with_comparison_1 (stmt,
1513 invert_tree_comparison (code2, false),
1514 op2a, op2b);
1515 else
1516 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1517 return canonicalize_bool (t, invert);
1520 /* Try to simplify the AND of the ssa variable defined by the assignment
1521 STMT with the comparison specified by (OP2A CODE2 OP2B).
1522 Return NULL_EXPR if we can't simplify this to a single expression. */
1524 static tree
1525 and_var_with_comparison_1 (gimple stmt,
1526 enum tree_code code2, tree op2a, tree op2b)
1528 tree var = gimple_assign_lhs (stmt);
1529 tree true_test_var = NULL_TREE;
1530 tree false_test_var = NULL_TREE;
1531 enum tree_code innercode = gimple_assign_rhs_code (stmt);
/* true_test_var/false_test_var record OP2A when the second comparison
   is equivalent to "op2a is true" resp. "op2a is false"; the
   reassociation identities below key off them.  */
1533 /* Check for identities like (var AND (var == 0)) => false. */
1534 if (TREE_CODE (op2a) == SSA_NAME
1535 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1537 if ((code2 == NE_EXPR && integer_zerop (op2b))
1538 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1540 true_test_var = op2a;
1541 if (var == true_test_var)
1542 return var;
1544 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1545 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1547 false_test_var = op2a;
1548 if (var == false_test_var)
1549 return boolean_false_node;
1553 /* If the definition is a comparison, recurse on it. */
1554 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1556 tree t = and_comparisons_1 (innercode,
1557 gimple_assign_rhs1 (stmt),
1558 gimple_assign_rhs2 (stmt),
1559 code2,
1560 op2a,
1561 op2b);
1562 if (t)
1563 return t;
1566 /* If the definition is an AND or OR expression, we may be able to
1567 simplify by reassociating. */
1568 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1569 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1571 tree inner1 = gimple_assign_rhs1 (stmt);
1572 tree inner2 = gimple_assign_rhs2 (stmt);
1573 gimple s;
1574 tree t;
1575 tree partial = NULL_TREE;
1576 bool is_and = (innercode == BIT_AND_EXPR);
1578 /* Check for boolean identities that don't require recursive examination
1579 of inner1/inner2:
1580 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1581 inner1 AND (inner1 OR inner2) => inner1
1582 !inner1 AND (inner1 AND inner2) => false
1583 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1584 Likewise for similar cases involving inner2. */
1585 if (inner1 == true_test_var)
1586 return (is_and ? var : inner1);
1587 else if (inner2 == true_test_var)
1588 return (is_and ? var : inner2);
1589 else if (inner1 == false_test_var)
1590 return (is_and
1591 ? boolean_false_node
1592 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1593 else if (inner2 == false_test_var)
1594 return (is_and
1595 ? boolean_false_node
1596 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1598 /* Next, redistribute/reassociate the AND across the inner tests.
1599 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1600 if (TREE_CODE (inner1) == SSA_NAME
1601 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1602 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1603 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1604 gimple_assign_rhs1 (s),
1605 gimple_assign_rhs2 (s),
1606 code2, op2a, op2b)))
1608 /* Handle the AND case, where we are reassociating:
1609 (inner1 AND inner2) AND (op2a code2 op2b)
1610 => (t AND inner2)
1611 If the partial result t is a constant, we win. Otherwise
1612 continue on to try reassociating with the other inner test. */
1613 if (is_and)
1615 if (integer_onep (t))
1616 return inner2;
1617 else if (integer_zerop (t))
1618 return boolean_false_node;
1621 /* Handle the OR case, where we are redistributing:
1622 (inner1 OR inner2) AND (op2a code2 op2b)
1623 => (t OR (inner2 AND (op2a code2 op2b))) */
1624 else if (integer_onep (t))
1625 return boolean_true_node;
1627 /* Save partial result for later. */
1628 partial = t;
1631 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1632 if (TREE_CODE (inner2) == SSA_NAME
1633 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1634 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1635 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1636 gimple_assign_rhs1 (s),
1637 gimple_assign_rhs2 (s),
1638 code2, op2a, op2b)))
1640 /* Handle the AND case, where we are reassociating:
1641 (inner1 AND inner2) AND (op2a code2 op2b)
1642 => (inner1 AND t) */
1643 if (is_and)
1645 if (integer_onep (t))
1646 return inner1;
1647 else if (integer_zerop (t))
1648 return boolean_false_node;
1649 /* If both are the same, we can apply the identity
1650 (x AND x) == x. */
1651 else if (partial && same_bool_result_p (t, partial))
1652 return t;
1655 /* Handle the OR case, where we are redistributing:
1656 (inner1 OR inner2) AND (op2a code2 op2b)
1657 => (t OR (inner1 AND (op2a code2 op2b)))
1658 => (t OR partial) */
1659 else
1661 if (integer_onep (t))
1662 return boolean_true_node;
1663 else if (partial)
1665 /* We already got a simplification for the other
1666 operand to the redistributed OR expression. The
1667 interesting case is when at least one is false.
1668 Or, if both are the same, we can apply the identity
1669 (x OR x) == x. */
1670 if (integer_zerop (partial))
1671 return t;
1672 else if (integer_zerop (t))
1673 return partial;
1674 else if (same_bool_result_p (t, partial))
1675 return t;
1680 return NULL_TREE;
1683 /* Try to simplify the AND of two comparisons defined by
1684 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1685 If this can be done without constructing an intermediate value,
1686 return the resulting tree; otherwise NULL_TREE is returned.
1687 This function is deliberately asymmetric as it recurses on SSA_DEFs
1688 in the first comparison but not the second. */
1690 static tree
1691 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1692 enum tree_code code2, tree op2a, tree op2b)
1694 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1695 if (operand_equal_p (op1a, op2a, 0)
1696 && operand_equal_p (op1b, op2b, 0))
1698 /* Result will be either NULL_TREE, or a combined comparison. */
1699 tree t = combine_comparisons (UNKNOWN_LOCATION,
1700 TRUTH_ANDIF_EXPR, code1, code2,
1701 boolean_type_node, op1a, op1b);
1702 if (t)
1703 return t;
1706 /* Likewise the swapped case of the above. */
1707 if (operand_equal_p (op1a, op2b, 0)
1708 && operand_equal_p (op1b, op2a, 0))
1710 /* Result will be either NULL_TREE, or a combined comparison. */
1711 tree t = combine_comparisons (UNKNOWN_LOCATION,
1712 TRUTH_ANDIF_EXPR, code1,
1713 swap_tree_comparison (code2),
1714 boolean_type_node, op1a, op1b);
1715 if (t)
1716 return t;
1719 /* If both comparisons are of the same value against constants, we might
1720 be able to merge them. */
1721 if (operand_equal_p (op1a, op2a, 0)
1722 && TREE_CODE (op1b) == INTEGER_CST
1723 && TREE_CODE (op2b) == INTEGER_CST)
1725 int cmp = tree_int_cst_compare (op1b, op2b);
/* cmp is negative, zero, or positive as op1b is less than, equal to,
   or greater than op2b.  */
1727 /* If we have (op1a == op1b), we should either be able to
1728 return that or FALSE, depending on whether the constant op1b
1729 also satisfies the other comparison against op2b. */
1730 if (code1 == EQ_EXPR)
1732 bool done = true;
1733 bool val;
1734 switch (code2)
1736 case EQ_EXPR: val = (cmp == 0); break;
1737 case NE_EXPR: val = (cmp != 0); break;
1738 case LT_EXPR: val = (cmp < 0); break;
1739 case GT_EXPR: val = (cmp > 0); break;
1740 case LE_EXPR: val = (cmp <= 0); break;
1741 case GE_EXPR: val = (cmp >= 0); break;
1742 default: done = false;
1744 if (done)
1746 if (val)
1747 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1748 else
1749 return boolean_false_node;
1752 /* Likewise if the second comparison is an == comparison. */
1753 else if (code2 == EQ_EXPR)
1755 bool done = true;
1756 bool val;
1757 switch (code1)
1759 case EQ_EXPR: val = (cmp == 0); break;
1760 case NE_EXPR: val = (cmp != 0); break;
1761 case LT_EXPR: val = (cmp > 0); break;
1762 case GT_EXPR: val = (cmp < 0); break;
1763 case LE_EXPR: val = (cmp >= 0); break;
1764 case GE_EXPR: val = (cmp <= 0); break;
1765 default: done = false;
1767 if (done)
1769 if (val)
1770 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1771 else
1772 return boolean_false_node;
1776 /* Same business with inequality tests. */
1777 else if (code1 == NE_EXPR)
1779 bool val;
1780 switch (code2)
1782 case EQ_EXPR: val = (cmp != 0); break;
1783 case NE_EXPR: val = (cmp == 0); break;
1784 case LT_EXPR: val = (cmp >= 0); break;
1785 case GT_EXPR: val = (cmp <= 0); break;
1786 case LE_EXPR: val = (cmp > 0); break;
1787 case GE_EXPR: val = (cmp < 0); break;
1788 default:
1789 val = false;
1791 if (val)
1792 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1794 else if (code2 == NE_EXPR)
1796 bool val;
1797 switch (code1)
1799 case EQ_EXPR: val = (cmp == 0); break;
1800 case NE_EXPR: val = (cmp != 0); break;
1801 case LT_EXPR: val = (cmp <= 0); break;
1802 case GT_EXPR: val = (cmp >= 0); break;
1803 case LE_EXPR: val = (cmp < 0); break;
1804 case GE_EXPR: val = (cmp > 0); break;
1805 default:
1806 val = false;
1808 if (val)
1809 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1812 /* Choose the more restrictive of two < or <= comparisons. */
1813 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1814 && (code2 == LT_EXPR || code2 == LE_EXPR))
1816 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1817 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1818 else
1819 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1822 /* Likewise choose the more restrictive of two > or >= comparisons. */
1823 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1824 && (code2 == GT_EXPR || code2 == GE_EXPR))
1826 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1827 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1828 else
1829 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1832 /* Check for singleton ranges. */
1833 else if (cmp == 0
1834 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1835 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1836 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1838 /* Check for disjoint ranges. */
1839 else if (cmp <= 0
1840 && (code1 == LT_EXPR || code1 == LE_EXPR)
1841 && (code2 == GT_EXPR || code2 == GE_EXPR))
1842 return boolean_false_node;
1843 else if (cmp >= 0
1844 && (code1 == GT_EXPR || code1 == GE_EXPR)
1845 && (code2 == LT_EXPR || code2 == LE_EXPR))
1846 return boolean_false_node;
1849 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1850 NAME's definition is a truth value. See if there are any simplifications
1851 that can be done against the NAME's definition. */
1852 if (TREE_CODE (op1a) == SSA_NAME
1853 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1854 && (integer_zerop (op1b) || integer_onep (op1b)))
1856 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1857 || (code1 == NE_EXPR && integer_onep (op1b)));
1858 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1859 switch (gimple_code (stmt))
1861 case GIMPLE_ASSIGN:
1862 /* Try to simplify by copy-propagating the definition. */
1863 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1865 case GIMPLE_PHI:
1866 /* If every argument to the PHI produces the same result when
1867 ANDed with the second comparison, we win.
1868 Do not do this unless the type is bool since we need a bool
1869 result here anyway. */
1870 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1872 tree result = NULL_TREE;
1873 unsigned i;
1874 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1876 tree arg = gimple_phi_arg_def (stmt, i);
1878 /* If this PHI has itself as an argument, ignore it.
1879 If all the other args produce the same result,
1880 we're still OK. */
1881 if (arg == gimple_phi_result (stmt))
1882 continue;
1883 else if (TREE_CODE (arg) == INTEGER_CST)
1885 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1887 if (!result)
1888 result = boolean_false_node;
1889 else if (!integer_zerop (result))
1890 return NULL_TREE;
1892 else if (!result)
1893 result = fold_build2 (code2, boolean_type_node,
1894 op2a, op2b);
1895 else if (!same_bool_comparison_p (result,
1896 code2, op2a, op2b))
1897 return NULL_TREE;
1899 else if (TREE_CODE (arg) == SSA_NAME
1900 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1902 tree temp;
1903 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1904 /* In simple cases we can look through PHI nodes,
1905 but we have to be careful with loops.
1906 See PR49073. */
1907 if (! dom_info_available_p (CDI_DOMINATORS)
1908 || gimple_bb (def_stmt) == gimple_bb (stmt)
1909 || dominated_by_p (CDI_DOMINATORS,
1910 gimple_bb (def_stmt),
1911 gimple_bb (stmt)))
1912 return NULL_TREE;
1913 temp = and_var_with_comparison (arg, invert, code2,
1914 op2a, op2b);
1915 if (!temp)
1916 return NULL_TREE;
1917 else if (!result)
1918 result = temp;
1919 else if (!same_bool_result_p (result, temp))
1920 return NULL_TREE;
1922 else
1923 return NULL_TREE;
1925 return result;
1928 default:
1929 break;
1932 return NULL_TREE;
1935 /* Try to simplify the AND of two comparisons, specified by
1936 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1937 If this can be simplified to a single expression (without requiring
1938 introducing more SSA variables to hold intermediate values),
1939 return the resulting tree. Otherwise return NULL_TREE.
1940 If the result expression is non-null, it has boolean type. */
1942 tree
1943 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1944 enum tree_code code2, tree op2a, tree op2b)
1946 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1947 if (t)
1948 return t;
1949 else
1950 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1953 /* Helper function for or_comparisons_1: try to simplify the OR of the
1954 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1955 If INVERT is true, invert the value of VAR before doing the OR.
1956 Return NULL_EXPR if we can't simplify this to a single expression. */
1958 static tree
1959 or_var_with_comparison (tree var, bool invert,
1960 enum tree_code code2, tree op2a, tree op2b)
1962 tree t;
1963 gimple stmt = SSA_NAME_DEF_STMT (var);
1965 /* We can only deal with variables whose definitions are assignments. */
1966 if (!is_gimple_assign (stmt))
1967 return NULL_TREE;
1969 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1970 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1971 Then we only have to consider the simpler non-inverted cases. */
1972 if (invert)
1973 t = and_var_with_comparison_1 (stmt,
1974 invert_tree_comparison (code2, false),
1975 op2a, op2b);
1976 else
1977 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1978 return canonicalize_bool (t, invert);
1981 /* Try to simplify the OR of the ssa variable defined by the assignment
1982 STMT with the comparison specified by (OP2A CODE2 OP2B).
1983 Return NULL_EXPR if we can't simplify this to a single expression. */
1985 static tree
1986 or_var_with_comparison_1 (gimple stmt,
1987 enum tree_code code2, tree op2a, tree op2b)
1989 tree var = gimple_assign_lhs (stmt);
1990 tree true_test_var = NULL_TREE;
1991 tree false_test_var = NULL_TREE;
1992 enum tree_code innercode = gimple_assign_rhs_code (stmt);
/* true_test_var/false_test_var record OP2A when the second comparison
   is equivalent to "op2a is true" resp. "op2a is false"; the
   reassociation identities below key off them.  */
1994 /* Check for identities like (var OR (var != 0)) => true.  */
1995 if (TREE_CODE (op2a) == SSA_NAME
1996 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1998 if ((code2 == NE_EXPR && integer_zerop (op2b))
1999 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2001 true_test_var = op2a;
2002 if (var == true_test_var)
2003 return var;
2005 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2006 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2008 false_test_var = op2a;
2009 if (var == false_test_var)
2010 return boolean_true_node;
2014 /* If the definition is a comparison, recurse on it. */
2015 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2017 tree t = or_comparisons_1 (innercode,
2018 gimple_assign_rhs1 (stmt),
2019 gimple_assign_rhs2 (stmt),
2020 code2,
2021 op2a,
2022 op2b);
2023 if (t)
2024 return t;
2027 /* If the definition is an AND or OR expression, we may be able to
2028 simplify by reassociating. */
2029 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2030 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2032 tree inner1 = gimple_assign_rhs1 (stmt);
2033 tree inner2 = gimple_assign_rhs2 (stmt);
2034 gimple s;
2035 tree t;
2036 tree partial = NULL_TREE;
2037 bool is_or = (innercode == BIT_IOR_EXPR);
2039 /* Check for boolean identities that don't require recursive examination
2040 of inner1/inner2:
2041 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2042 inner1 OR (inner1 AND inner2) => inner1
2043 !inner1 OR (inner1 OR inner2) => true
2044 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
     Likewise for similar cases involving inner2.  */
2046 if (inner1 == true_test_var)
2047 return (is_or ? var : inner1);
2048 else if (inner2 == true_test_var)
2049 return (is_or ? var : inner2);
2050 else if (inner1 == false_test_var)
2051 return (is_or
2052 ? boolean_true_node
2053 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2054 else if (inner2 == false_test_var)
2055 return (is_or
2056 ? boolean_true_node
2057 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2059 /* Next, redistribute/reassociate the OR across the inner tests.
2060 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2061 if (TREE_CODE (inner1) == SSA_NAME
2062 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2063 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2064 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2065 gimple_assign_rhs1 (s),
2066 gimple_assign_rhs2 (s),
2067 code2, op2a, op2b)))
2069 /* Handle the OR case, where we are reassociating:
2070 (inner1 OR inner2) OR (op2a code2 op2b)
2071 => (t OR inner2)
2072 If the partial result t is a constant, we win. Otherwise
2073 continue on to try reassociating with the other inner test. */
2074 if (is_or)
2076 if (integer_onep (t))
2077 return boolean_true_node;
2078 else if (integer_zerop (t))
2079 return inner2;
2082 /* Handle the AND case, where we are redistributing:
2083 (inner1 AND inner2) OR (op2a code2 op2b)
2084 => (t AND (inner2 OR (op2a code op2b))) */
2085 else if (integer_zerop (t))
2086 return boolean_false_node;
2088 /* Save partial result for later. */
2089 partial = t;
2092 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2093 if (TREE_CODE (inner2) == SSA_NAME
2094 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2095 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2096 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2097 gimple_assign_rhs1 (s),
2098 gimple_assign_rhs2 (s),
2099 code2, op2a, op2b)))
2101 /* Handle the OR case, where we are reassociating:
2102 (inner1 OR inner2) OR (op2a code2 op2b)
2103 => (inner1 OR t)
2104 => (t OR partial) */
2105 if (is_or)
2107 if (integer_zerop (t))
2108 return inner1;
2109 else if (integer_onep (t))
2110 return boolean_true_node;
2111 /* If both are the same, we can apply the identity
2112 (x OR x) == x. */
2113 else if (partial && same_bool_result_p (t, partial))
2114 return t;
2117 /* Handle the AND case, where we are redistributing:
2118 (inner1 AND inner2) OR (op2a code2 op2b)
2119 => (t AND (inner1 OR (op2a code2 op2b)))
2120 => (t AND partial) */
2121 else
2123 if (integer_zerop (t))
2124 return boolean_false_node;
2125 else if (partial)
2127 /* We already got a simplification for the other
2128 operand to the redistributed AND expression. The
2129 interesting case is when at least one is true.
2130 Or, if both are the same, we can apply the identity
2131 (x AND x) == x. */
2132 if (integer_onep (partial))
2133 return t;
2134 else if (integer_onep (t))
2135 return partial;
2136 else if (same_bool_result_p (t, partial))
2137 return t;
2142 return NULL_TREE;
2145 /* Try to simplify the OR of two comparisons defined by
2146 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2147 If this can be done without constructing an intermediate value,
2148 return the resulting tree; otherwise NULL_TREE is returned.
2149 This function is deliberately asymmetric as it recurses on SSA_DEFs
2150 in the first comparison but not the second. */
2152 static tree
2153 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2154 enum tree_code code2, tree op2a, tree op2b)
2156 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2157 if (operand_equal_p (op1a, op2a, 0)
2158 && operand_equal_p (op1b, op2b, 0))
2160 /* Result will be either NULL_TREE, or a combined comparison. */
2161 tree t = combine_comparisons (UNKNOWN_LOCATION,
2162 TRUTH_ORIF_EXPR, code1, code2,
2163 boolean_type_node, op1a, op1b);
2164 if (t)
2165 return t;
2168 /* Likewise the swapped case of the above. */
2169 if (operand_equal_p (op1a, op2b, 0)
2170 && operand_equal_p (op1b, op2a, 0))
2172 /* Result will be either NULL_TREE, or a combined comparison. */
2173 tree t = combine_comparisons (UNKNOWN_LOCATION,
2174 TRUTH_ORIF_EXPR, code1,
2175 swap_tree_comparison (code2),
2176 boolean_type_node, op1a, op1b);
2177 if (t)
2178 return t;
2181 /* If both comparisons are of the same value against constants, we might
2182 be able to merge them. */
2183 if (operand_equal_p (op1a, op2a, 0)
2184 && TREE_CODE (op1b) == INTEGER_CST
2185 && TREE_CODE (op2b) == INTEGER_CST)
/* CMP is negative, zero, or positive as OP1B is less than, equal to,
   or greater than OP2B; the case analysis below is driven by it.  */
2187 int cmp = tree_int_cst_compare (op1b, op2b);
2189 /* If we have (op1a != op1b), we should either be able to
2190 return that or TRUE, depending on whether the constant op1b
2191 also satisfies the other comparison against op2b. */
2192 if (code1 == NE_EXPR)
2194 bool done = true;
2195 bool val;
2196 switch (code2)
2198 case EQ_EXPR: val = (cmp == 0); break;
2199 case NE_EXPR: val = (cmp != 0); break;
2200 case LT_EXPR: val = (cmp < 0); break;
2201 case GT_EXPR: val = (cmp > 0); break;
2202 case LE_EXPR: val = (cmp <= 0); break;
2203 case GE_EXPR: val = (cmp >= 0); break;
2204 default: done = false;
2206 if (done)
2208 if (val)
2209 return boolean_true_node;
2210 else
2211 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2214 /* Likewise if the second comparison is a != comparison. */
2215 else if (code2 == NE_EXPR)
2217 bool done = true;
2218 bool val;
2219 switch (code1)
2221 case EQ_EXPR: val = (cmp == 0); break;
2222 case NE_EXPR: val = (cmp != 0); break;
2223 case LT_EXPR: val = (cmp > 0); break;
2224 case GT_EXPR: val = (cmp < 0); break;
2225 case LE_EXPR: val = (cmp >= 0); break;
2226 case GE_EXPR: val = (cmp <= 0); break;
2227 default: done = false;
2229 if (done)
2231 if (val)
2232 return boolean_true_node;
2233 else
2234 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2238 /* See if an equality test is redundant with the other comparison. */
2239 else if (code1 == EQ_EXPR)
2241 bool val;
2242 switch (code2)
2244 case EQ_EXPR: val = (cmp == 0); break;
2245 case NE_EXPR: val = (cmp != 0); break;
2246 case LT_EXPR: val = (cmp < 0); break;
2247 case GT_EXPR: val = (cmp > 0); break;
2248 case LE_EXPR: val = (cmp <= 0); break;
2249 case GE_EXPR: val = (cmp >= 0); break;
2250 default:
2251 val = false;
2253 if (val)
2254 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2256 else if (code2 == EQ_EXPR)
2258 bool val;
2259 switch (code1)
2261 case EQ_EXPR: val = (cmp == 0); break;
2262 case NE_EXPR: val = (cmp != 0); break;
2263 case LT_EXPR: val = (cmp > 0); break;
2264 case GT_EXPR: val = (cmp < 0); break;
2265 case LE_EXPR: val = (cmp >= 0); break;
2266 case GE_EXPR: val = (cmp <= 0); break;
2267 default:
2268 val = false;
2270 if (val)
2271 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2274 /* Chose the less restrictive of two < or <= comparisons. */
2275 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2276 && (code2 == LT_EXPR || code2 == LE_EXPR))
2278 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2279 return fold_build2 (code2, boolean_type_node, op2a, op2b)
2280 else
2281 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2284 /* Likewise chose the less restrictive of two > or >= comparisons. */
2285 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2286 && (code2 == GT_EXPR || code2 == GE_EXPR))
2288 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2289 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2290 else
2291 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2294 /* Check for singleton ranges. */
2295 else if (cmp == 0
2296 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2297 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2298 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2300 /* Check for less/greater pairs that don't restrict the range at all. */
2301 else if (cmp >= 0
2302 && (code1 == LT_EXPR || code1 == LE_EXPR)
2303 && (code2 == GT_EXPR || code2 == GE_EXPR))
2304 return boolean_true_node;
2305 else if (cmp <= 0
2306 && (code1 == GT_EXPR || code1 == GE_EXPR)
2307 && (code2 == LT_EXPR || code2 == LE_EXPR))
2308 return boolean_true_node;
2311 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2312 NAME's definition is a truth value. See if there are any simplifications
2313 that can be done against the NAME's definition. */
2314 if (TREE_CODE (op1a) == SSA_NAME
2315 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2316 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is true when the first comparison effectively tests !NAME
   (i.e. NAME == 0 or NAME != 1).  */
2318 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2319 || (code1 == NE_EXPR && integer_onep (op1b)));
2320 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2321 switch (gimple_code (stmt))
2323 case GIMPLE_ASSIGN:
2324 /* Try to simplify by copy-propagating the definition. */
2325 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2327 case GIMPLE_PHI:
2328 /* If every argument to the PHI produces the same result when
2329 ORed with the second comparison, we win.
2330 Do not do this unless the type is bool since we need a bool
2331 result here anyway. */
2332 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2334 tree result = NULL_TREE;
2335 unsigned i;
2336 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2338 tree arg = gimple_phi_arg_def (stmt, i);
2340 /* If this PHI has itself as an argument, ignore it.
2341 If all the other args produce the same result,
2342 we're still OK. */
2343 if (arg == gimple_phi_result (stmt))
2344 continue;
2345 else if (TREE_CODE (arg) == INTEGER_CST)
2347 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2349 if (!result)
2350 result = boolean_true_node;
2351 else if (!integer_onep (result))
2352 return NULL_TREE;
2354 else if (!result)
2355 result = fold_build2 (code2, boolean_type_node,
2356 op2a, op2b);
2357 else if (!same_bool_comparison_p (result,
2358 code2, op2a, op2b))
2359 return NULL_TREE;
2361 else if (TREE_CODE (arg) == SSA_NAME
2362 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2364 tree temp;
2365 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2366 /* In simple cases we can look through PHI nodes,
2367 but we have to be careful with loops.
2368 See PR49073. */
2369 if (! dom_info_available_p (CDI_DOMINATORS)
2370 || gimple_bb (def_stmt) == gimple_bb (stmt)
2371 || dominated_by_p (CDI_DOMINATORS,
2372 gimple_bb (def_stmt),
2373 gimple_bb (stmt)))
2374 return NULL_TREE;
2375 temp = or_var_with_comparison (arg, invert, code2,
2376 op2a, op2b);
2377 if (!temp)
2378 return NULL_TREE;
2379 else if (!result)
2380 result = temp;
2381 else if (!same_bool_result_p (result, temp))
2382 return NULL_TREE;
2384 else
2385 return NULL_TREE;
2387 return result;
2390 default:
2391 break;
2394 return NULL_TREE;
2397 /* Try to simplify the OR of two comparisons, specified by
2398 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2399 If this can be simplified to a single expression (without requiring
2400 introducing more SSA variables to hold intermediate values),
2401 return the resulting tree. Otherwise return NULL_TREE.
2402 If the result expression is non-null, it has boolean type. */
2404 tree
2405 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2406 enum tree_code code2, tree op2a, tree op2b)
2408 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2409 if (t)
2410 return t;
2411 else
2412 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2416 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2418 Either NULL_TREE, a simplified but non-constant or a constant
2419 is returned.
2421 ??? This should go into a gimple-fold-inline.h file to be eventually
2422 privatized with the single valueize function used in the various TUs
2423 to avoid the indirect function call overhead. */
2425 tree
2426 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2428 location_t loc = gimple_location (stmt);
2429 switch (gimple_code (stmt))
2431 case GIMPLE_ASSIGN:
2433 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2435 switch (get_gimple_rhs_class (subcode))
2437 case GIMPLE_SINGLE_RHS:
2439 tree rhs = gimple_assign_rhs1 (stmt);
2440 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2442 if (TREE_CODE (rhs) == SSA_NAME)
2444 /* If the RHS is an SSA_NAME, return its known constant value,
2445 if any. */
2446 return (*valueize) (rhs);
2448 /* Handle propagating invariant addresses into address
2449 operations. */
2450 else if (TREE_CODE (rhs) == ADDR_EXPR
2451 && !is_gimple_min_invariant (rhs))
2453 HOST_WIDE_INT offset;
2454 tree base;
2455 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2456 &offset,
2457 valueize);
2458 if (base
2459 && (CONSTANT_CLASS_P (base)
2460 || decl_address_invariant_p (base)))
2461 return build_invariant_address (TREE_TYPE (rhs),
2462 base, offset);
/* A fully-populated vector CONSTRUCTOR folds to a VECTOR_CST when every
   valueized element is a constant.  */
2464 else if (TREE_CODE (rhs) == CONSTRUCTOR
2465 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2466 && (CONSTRUCTOR_NELTS (rhs)
2467 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2469 unsigned i;
2470 tree val, *vec;
2472 vec = XALLOCAVEC (tree,
2473 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2474 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2476 val = (*valueize) (val);
2477 if (TREE_CODE (val) == INTEGER_CST
2478 || TREE_CODE (val) == REAL_CST
2479 || TREE_CODE (val) == FIXED_CST)
2480 vec[i] = val;
2481 else
2482 return NULL_TREE;
2485 return build_vector (TREE_TYPE (rhs), vec);
2488 if (kind == tcc_reference)
2490 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2491 || TREE_CODE (rhs) == REALPART_EXPR
2492 || TREE_CODE (rhs) == IMAGPART_EXPR)
2493 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2495 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2496 return fold_unary_loc (EXPR_LOCATION (rhs),
2497 TREE_CODE (rhs),
2498 TREE_TYPE (rhs), val);
2500 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2501 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2503 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2504 return fold_ternary_loc (EXPR_LOCATION (rhs),
2505 TREE_CODE (rhs),
2506 TREE_TYPE (rhs), val,
2507 TREE_OPERAND (rhs, 1),
2508 TREE_OPERAND (rhs, 2));
2510 else if (TREE_CODE (rhs) == MEM_REF
2511 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2513 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2514 if (TREE_CODE (val) == ADDR_EXPR
2515 && is_gimple_min_invariant (val))
2517 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2518 unshare_expr (val),
2519 TREE_OPERAND (rhs, 1));
2520 if (tem)
2521 rhs = tem;
2524 return fold_const_aggregate_ref_1 (rhs, valueize);
2526 else if (kind == tcc_declaration)
2527 return get_symbol_constant_value (rhs);
2528 return rhs;
2531 case GIMPLE_UNARY_RHS:
2533 /* Handle unary operators that can appear in GIMPLE form.
2534 Note that we know the single operand must be a constant,
2535 so this should almost always return a simplified RHS. */
2536 tree lhs = gimple_assign_lhs (stmt);
2537 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2539 /* Conversions are useless for CCP purposes if they are
2540 value-preserving. Thus the restrictions that
2541 useless_type_conversion_p places for restrict qualification
2542 of pointer types should not apply here.
2543 Substitution later will only substitute to allowed places. */
2544 if (CONVERT_EXPR_CODE_P (subcode)
2545 && POINTER_TYPE_P (TREE_TYPE (lhs))
2546 && POINTER_TYPE_P (TREE_TYPE (op0))
2547 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2548 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2549 && TYPE_MODE (TREE_TYPE (lhs))
2550 == TYPE_MODE (TREE_TYPE (op0)))
2551 return op0;
2553 return
2554 fold_unary_ignore_overflow_loc (loc, subcode,
2555 gimple_expr_type (stmt), op0);
2558 case GIMPLE_BINARY_RHS:
2560 /* Handle binary operators that can appear in GIMPLE form. */
2561 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2562 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2564 /* Translate &x + CST into an invariant form suitable for
2565 further propagation. */
2566 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2567 && TREE_CODE (op0) == ADDR_EXPR
2568 && TREE_CODE (op1) == INTEGER_CST)
2570 tree off = fold_convert (ptr_type_node, op1);
2571 return build_fold_addr_expr_loc
2572 (loc,
2573 fold_build2 (MEM_REF,
2574 TREE_TYPE (TREE_TYPE (op0)),
2575 unshare_expr (op0), off));
2578 return fold_binary_loc (loc, subcode,
2579 gimple_expr_type (stmt), op0, op1);
2582 case GIMPLE_TERNARY_RHS:
2584 /* Handle ternary operators that can appear in GIMPLE form. */
2585 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2586 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2587 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2589 /* Fold embedded expressions in ternary codes. */
2590 if ((subcode == COND_EXPR
2591 || subcode == VEC_COND_EXPR)
2592 && COMPARISON_CLASS_P (op0))
2594 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2595 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2596 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2597 TREE_TYPE (op0), op00, op01);
2598 if (tem)
2599 op0 = tem;
2602 return fold_ternary_loc (loc, subcode,
2603 gimple_expr_type (stmt), op0, op1, op2);
2606 default:
2607 gcc_unreachable ();
2611 case GIMPLE_CALL:
2613 tree fn;
2615 if (gimple_call_internal_p (stmt))
2616 /* No folding yet for these functions. */
2617 return NULL_TREE;
2619 fn = (*valueize) (gimple_call_fn (stmt));
/* Only calls whose valueized callee is a known built-in are folded;
   the arguments are valueized and the call re-built as a tree.  */
2620 if (TREE_CODE (fn) == ADDR_EXPR
2621 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2622 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2624 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2625 tree call, retval;
2626 unsigned i;
2627 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2628 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2629 call = build_call_array_loc (loc,
2630 gimple_call_return_type (stmt),
2631 fn, gimple_call_num_args (stmt), args);
2632 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2633 if (retval)
2634 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2635 STRIP_NOPS (retval);
2636 return retval;
2638 return NULL_TREE;
2641 default:
2642 return NULL_TREE;
2646 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2647 Returns NULL_TREE if folding to a constant is not possible, otherwise
2648 returns a constant according to is_gimple_min_invariant. */
2650 tree
2651 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2653 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2654 if (res && is_gimple_min_invariant (res))
2655 return res;
2656 return NULL_TREE;
2660 /* The following set of functions are supposed to fold references using
2661 their constant initializers. */
2663 static tree fold_ctor_reference (tree type, tree ctor,
2664 unsigned HOST_WIDE_INT offset,
2665 unsigned HOST_WIDE_INT size);
2667 /* See if we can find constructor defining value of BASE.
2668 When we know the constructor with constant offset (such as
2669 base is array[40] and we do know constructor of array), then
2670 BIT_OFFSET is adjusted accordingly.
2672 As a special case, return error_mark_node when constructor
2673 is not explicitly available, but it is known to be zero
2674 such as 'static const int a;'. */
2675 static tree
2676 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2677 tree (*valueize)(tree))
2679 HOST_WIDE_INT bit_offset2, size, max_size;
2680 if (TREE_CODE (base) == MEM_REF)
2682 if (!integer_zerop (TREE_OPERAND (base, 1)))
2684 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2685 return NULL_TREE;
2686 *bit_offset += (mem_ref_offset (base).low
2687 * BITS_PER_UNIT)
2690 if (valueize
2691 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
/* Look through an SSA-name pointer when a valueizer is supplied;
   we only proceed if it valueizes to &decl.  */
2692 base = valueize (TREE_OPERAND (base, 0));
2693 if (!base || TREE_CODE (base) != ADDR_EXPR)
2694 return NULL_TREE;
2695 base = TREE_OPERAND (base, 0);
2698 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2699 DECL_INITIAL. If BASE is a nested reference into another
2700 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2701 the inner reference. */
2702 switch (TREE_CODE (base))
2704 case VAR_DECL:
2705 if (!const_value_known_p (base))
2706 return NULL_TREE;
2708 /* Fallthru. */
2709 case CONST_DECL:
/* A static/external decl with no explicit initializer is known to be
   zero-initialized; error_mark_node encodes that special case.  */
2710 if (!DECL_INITIAL (base)
2711 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2712 return error_mark_node;
2713 return DECL_INITIAL (base);
2715 case ARRAY_REF:
2716 case COMPONENT_REF:
2717 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2718 if (max_size == -1 || size != max_size)
2719 return NULL_TREE;
2720 *bit_offset += bit_offset2;
2721 return get_base_constructor (base, bit_offset, valueize);
2723 case STRING_CST:
2724 case CONSTRUCTOR:
2725 return base;
2727 default:
2728 return NULL_TREE;
2732 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2733 to the memory at bit OFFSET.
2735 We do only simple job of folding byte accesses. */
2737 static tree
2738 fold_string_cst_ctor_reference (tree type, tree ctor,
2739 unsigned HOST_WIDE_INT offset,
2740 unsigned HOST_WIDE_INT size)
2742 if (INTEGRAL_TYPE_P (type)
2743 && (TYPE_MODE (type)
2744 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2745 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2746 == MODE_INT)
2747 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2748 && size == BITS_PER_UNIT
2749 && !(offset % BITS_PER_UNIT))
2751 offset /= BITS_PER_UNIT;
2752 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2753 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2754 [offset]));
2755 /* Folding
2756 const char a[20]="hello";
2757 return a[10];
2759 might lead to offset greater than string length. In this case we
2760 know value is either initialized to 0 or out of bounds. Return 0
2761 in both cases. */
2762 return build_zero_cst (type);
2764 return NULL_TREE;
2767 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2768 SIZE to the memory at bit OFFSET. */
2770 static tree
2771 fold_array_ctor_reference (tree type, tree ctor,
2772 unsigned HOST_WIDE_INT offset,
2773 unsigned HOST_WIDE_INT size)
2775 unsigned HOST_WIDE_INT cnt;
2776 tree cfield, cval;
2777 double_int low_bound, elt_size;
2778 double_int index, max_index;
2779 double_int access_index;
2780 tree domain_type = NULL_TREE;
2781 HOST_WIDE_INT inner_offset;
2783 /* Compute low bound and elt size. */
2784 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2785 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2786 if (domain_type && TYPE_MIN_VALUE (domain_type))
2788 /* Static constructors for variably sized objects makes no sense. */
2789 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2790 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2792 else
2793 low_bound = double_int_zero;
2794 /* Static constructors for variably sized objects makes no sense. */
2795 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2796 == INTEGER_CST);
2797 elt_size =
2798 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2801 /* We can handle only constantly sized accesses that are known to not
2802 be larger than size of array element. */
2803 if (!TYPE_SIZE_UNIT (type)
2804 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2805 || double_int_cmp (elt_size,
2806 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2807 return NULL_TREE;
2809 /* Compute the array index we look for. */
2810 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2811 elt_size, TRUNC_DIV_EXPR);
2812 access_index = double_int_add (access_index, low_bound);
2814 /* And offset within the access. */
2815 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2817 /* See if the array field is large enough to span whole access. We do not
2818 care to fold accesses spanning multiple array indexes. */
2819 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
2820 return NULL_TREE;
/* Start just below LOW_BOUND so the first element without an explicit
   index gets index LOW_BOUND after the increment below.  */
2822 index = double_int_sub (low_bound, double_int_one);
2823 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2825 /* Array constructor might explicitely set index, or specify range
2826 or leave index NULL meaning that it is next index after previous
2827 one. */
2828 if (cfield)
2830 if (TREE_CODE (cfield) == INTEGER_CST)
2831 max_index = index = tree_to_double_int (cfield);
2832 else
2834 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2835 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2836 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2839 else
2840 max_index = index = double_int_add (index, double_int_one);
2842 /* Do we have match? */
2843 if (double_int_cmp (access_index, index, 1) >= 0
2844 && double_int_cmp (access_index, max_index, 1) <= 0)
2845 return fold_ctor_reference (type, cval, inner_offset, size);
2847 /* When memory is not explicitely mentioned in constructor,
2848 it is 0 (or out of range). */
2849 return build_zero_cst (type);
2852 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2853 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2855 static tree
2856 fold_nonarray_ctor_reference (tree type, tree ctor,
2857 unsigned HOST_WIDE_INT offset,
2858 unsigned HOST_WIDE_INT size)
2860 unsigned HOST_WIDE_INT cnt;
2861 tree cfield, cval;
/* Scan the initialized fields looking for one that overlaps the
   requested bit range [OFFSET, OFFSET+SIZE).  */
2863 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2864 cval)
2866 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2867 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2868 tree field_size = DECL_SIZE (cfield);
2869 double_int bitoffset;
2870 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2871 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
2872 double_int bitoffset_end, access_end;
2874 /* Variable sized objects in static constructors makes no sense,
2875 but field_size can be NULL for flexible array members. */
2876 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2877 && TREE_CODE (byte_offset) == INTEGER_CST
2878 && (field_size != NULL_TREE
2879 ? TREE_CODE (field_size) == INTEGER_CST
2880 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2882 /* Compute bit offset of the field. */
2883 bitoffset = double_int_add (tree_to_double_int (field_offset),
2884 double_int_mul (byte_offset_cst,
2885 bits_per_unit_cst));
2886 /* Compute bit offset where the field ends. */
2887 if (field_size != NULL_TREE)
2888 bitoffset_end = double_int_add (bitoffset,
2889 tree_to_double_int (field_size));
2890 else
2891 bitoffset_end = double_int_zero;
2893 access_end = double_int_add (uhwi_to_double_int (offset),
2894 uhwi_to_double_int (size));
2896 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2897 [BITOFFSET, BITOFFSET_END)? */
2898 if (double_int_cmp (access_end, bitoffset, 0) > 0
2899 && (field_size == NULL_TREE
2900 || double_int_cmp (uhwi_to_double_int (offset),
2901 bitoffset_end, 0) < 0))
2903 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2904 bitoffset);
2905 /* We do have overlap. Now see if field is large enough to
2906 cover the access. Give up for accesses spanning multiple
2907 fields. */
2908 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2909 return NULL_TREE;
2910 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
2911 return NULL_TREE;
2912 return fold_ctor_reference (type, cval,
2913 double_int_to_uhwi (inner_offset), size);
2916 /* When memory is not explicitely mentioned in constructor, it is 0. */
2917 return build_zero_cst (type);
2920 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2921 to the memory at bit OFFSET. */
2923 static tree
2924 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2925 unsigned HOST_WIDE_INT size)
2927 tree ret;
2929 /* We found the field with exact match. */
2930 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2931 && !offset)
2932 return canonicalize_constructor_val (ctor);
2934 /* We are at the end of walk, see if we can view convert the
2935 result. */
2936 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2937 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2938 && operand_equal_p (TYPE_SIZE (type),
2939 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2941 ret = canonicalize_constructor_val (ctor);
2942 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2943 if (ret)
2944 STRIP_NOPS (ret);
2945 return ret;
2947 if (TREE_CODE (ctor) == STRING_CST)
2948 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2949 if (TREE_CODE (ctor) == CONSTRUCTOR)
2952 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2953 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2954 return fold_array_ctor_reference (type, ctor, offset, size);
2955 else
2956 return fold_nonarray_ctor_reference (type, ctor, offset, size);
2959 return NULL_TREE;
2962 /* Return the tree representing the element referenced by T if T is an
2963 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
2964 names using VALUEIZE. Return NULL_TREE otherwise. */
2966 tree
2967 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2969 tree ctor, idx, base;
2970 HOST_WIDE_INT offset, size, max_size;
2971 tree tem;
/* Never fold away a volatile access.  */
2973 if (TREE_THIS_VOLATILE (t))
2974 return NULL_TREE;
2976 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2977 return get_symbol_constant_value (t);
2979 tem = fold_read_from_constant_string (t);
2980 if (tem)
2981 return tem;
2983 switch (TREE_CODE (t))
2985 case ARRAY_REF:
2986 case ARRAY_RANGE_REF:
2987 /* Constant indexes are handled well by get_base_constructor.
2988 Only special case variable offsets.
2989 FIXME: This code can't handle nested references with variable indexes
2990 (they will be handled only by iteration of ccp). Perhaps we can bring
2991 get_ref_base_and_extent here and make it use a valueize callback. */
2992 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
2993 && valueize
2994 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
2995 && host_integerp (idx, 0))
2997 tree low_bound, unit_size;
2999 /* If the resulting bit-offset is constant, track it. */
3000 if ((low_bound = array_ref_low_bound (t),
3001 host_integerp (low_bound, 0))
3002 && (unit_size = array_ref_element_size (t),
3003 host_integerp (unit_size, 1)))
3005 offset = TREE_INT_CST_LOW (idx);
3006 offset -= TREE_INT_CST_LOW (low_bound);
3007 offset *= TREE_INT_CST_LOW (unit_size);
3008 offset *= BITS_PER_UNIT;
3010 base = TREE_OPERAND (t, 0);
3011 ctor = get_base_constructor (base, &offset, valueize);
3012 /* Empty constructor. Always fold to 0. */
3013 if (ctor == error_mark_node)
3014 return build_zero_cst (TREE_TYPE (t));
3015 /* Out of bound array access. Value is undefined,
3016 but don't fold. */
3017 if (offset < 0)
3018 return NULL_TREE;
3019 /* We can not determine ctor. */
3020 if (!ctor)
3021 return NULL_TREE;
3022 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3023 TREE_INT_CST_LOW (unit_size)
3024 * BITS_PER_UNIT);
3027 /* Fallthru. */
3029 case COMPONENT_REF:
3030 case BIT_FIELD_REF:
3031 case TARGET_MEM_REF:
3032 case MEM_REF:
3033 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3034 ctor = get_base_constructor (base, &offset, valueize);
3036 /* Empty constructor. Always fold to 0. */
3037 if (ctor == error_mark_node)
3038 return build_zero_cst (TREE_TYPE (t));
3039 /* We do not know precise address. */
3040 if (max_size == -1 || max_size != size)
3041 return NULL_TREE;
3042 /* We can not determine ctor. */
3043 if (!ctor)
3044 return NULL_TREE;
3046 /* Out of bound array access. Value is undefined, but don't fold. */
3047 if (offset < 0)
3048 return NULL_TREE;
3050 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3052 case REALPART_EXPR:
3053 case IMAGPART_EXPR:
/* Recurse on the complex-valued operand; only a COMPLEX_CST can be
   decomposed into its real/imaginary part here.  */
3055 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3056 if (c && TREE_CODE (c) == COMPLEX_CST)
3057 return fold_build1_loc (EXPR_LOCATION (t),
3058 TREE_CODE (t), TREE_TYPE (t), c);
3059 break;
3062 default:
3063 break;
3066 return NULL_TREE;
3069 tree
3070 fold_const_aggregate_ref (tree t)
3072 return fold_const_aggregate_ref_1 (t, NULL);
3075 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3076 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3077 KNOWN_BINFO carries the binfo describing the true type of
3078 OBJ_TYPE_REF_OBJECT(REF). */
3080 tree
3081 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3083 unsigned HOST_WIDE_INT offset, size;
3084 tree v, fn;
3086 v = BINFO_VTABLE (known_binfo);
3087 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3088 if (!v)
3089 return NULL_TREE;
/* The vtable reference is either &var or &var plus a constant byte
   offset; normalize to the bare address plus a bit offset.  */
3091 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3093 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3094 v = TREE_OPERAND (v, 0);
3096 else
3097 offset = 0;
3099 if (TREE_CODE (v) != ADDR_EXPR)
3100 return NULL_TREE;
3101 v = TREE_OPERAND (v, 0);
3103 if (TREE_CODE (v) != VAR_DECL
3104 || !DECL_VIRTUAL_P (v)
3105 || !DECL_INITIAL (v)
3106 || DECL_INITIAL (v) == error_mark_node)
3107 return NULL_TREE;
3108 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3109 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
/* TOKEN indexes the vtable array; fold the corresponding slot of the
   vtable initializer to obtain the target function's address.  */
3110 offset += token * size;
3111 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3112 offset, size);
3113 if (!fn)
3114 return NULL_TREE;
3115 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3116 || TREE_CODE (fn) == FDESC_EXPR);
3117 fn = TREE_OPERAND (fn, 0);
3118 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3120 /* When cgraph node is missing and function is not public, we cannot
3121 devirtualize. This can happen in WHOPR when the actual method
3122 ends up in other partition, because we found devirtualization
3123 possibility too late. */
3124 if (!can_refer_decl_in_current_unit_p (fn))
3125 return NULL_TREE;
3127 /* Make sure we create a cgraph node for functions we'll reference.
3128 They can be non-existent if the reference comes from an entry
3129 of an external vtable for example. */
3130 cgraph_get_create_node (fn);
3132 return fn;
3135 /* Return true iff VAL is a gimple expression that is known to be
3136 non-negative. Restricted to floating-point inputs. */
3138 bool
3139 gimple_val_nonnegative_real_p (tree val)
3141 gimple def_stmt;
3143 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3145 /* Use existing logic for non-gimple trees. */
3146 if (tree_expr_nonnegative_p (val))
3147 return true;
3149 if (TREE_CODE (val) != SSA_NAME)
3150 return false;
3152 /* Currently we look only at the immediately defining statement
3153 to make this determination, since recursion on defining
3154 statements of operands can lead to quadratic behavior in the
3155 worst case. This is expected to catch almost all occurrences
3156 in practice. It would be possible to implement limited-depth
3157 recursion if important cases are lost. Alternatively, passes
3158 that need this information (such as the pow/powi lowering code
3159 in the cse_sincos pass) could be revised to provide it through
3160 dataflow propagation. */
3162 def_stmt = SSA_NAME_DEF_STMT (val);
3164 if (is_gimple_assign (def_stmt))
3166 tree op0, op1;
3168 /* See fold-const.c:tree_expr_nonnegative_p for additional
3169 cases that could be handled with recursion. */
3171 switch (gimple_assign_rhs_code (def_stmt))
3173 case ABS_EXPR:
3174 /* Always true for floating-point operands. */
3175 return true;
3177 case MULT_EXPR:
3178 /* True if the two operands are identical (since we are
3179 restricted to floating-point inputs). */
3180 op0 = gimple_assign_rhs1 (def_stmt);
3181 op1 = gimple_assign_rhs2 (def_stmt);
3183 if (op0 == op1
3184 || operand_equal_p (op0, op1, 0))
3185 return true;
3187 default:
3188 return false;
3191 else if (is_gimple_call (def_stmt))
3193 tree fndecl = gimple_call_fndecl (def_stmt);
3194 if (fndecl
3195 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3197 tree arg1;
3199 switch (DECL_FUNCTION_CODE (fndecl))
3201 CASE_FLT_FN (BUILT_IN_ACOS):
3202 CASE_FLT_FN (BUILT_IN_ACOSH):
3203 CASE_FLT_FN (BUILT_IN_CABS):
3204 CASE_FLT_FN (BUILT_IN_COSH):
3205 CASE_FLT_FN (BUILT_IN_ERFC):
3206 CASE_FLT_FN (BUILT_IN_EXP):
3207 CASE_FLT_FN (BUILT_IN_EXP10):
3208 CASE_FLT_FN (BUILT_IN_EXP2):
3209 CASE_FLT_FN (BUILT_IN_FABS):
3210 CASE_FLT_FN (BUILT_IN_FDIM):
3211 CASE_FLT_FN (BUILT_IN_HYPOT):
3212 CASE_FLT_FN (BUILT_IN_POW10):
3213 return true;
3215 CASE_FLT_FN (BUILT_IN_SQRT):
3216 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3217 nonnegative inputs. */
3218 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3219 return true;
3221 break;
3223 CASE_FLT_FN (BUILT_IN_POWI):
3224 /* True if the second argument is an even integer. */
3225 arg1 = gimple_call_arg (def_stmt, 1);
3227 if (TREE_CODE (arg1) == INTEGER_CST
3228 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3229 return true;
3231 break;
3233 CASE_FLT_FN (BUILT_IN_POW):
3234 /* True if the second argument is an even integer-valued
3235 real. */
3236 arg1 = gimple_call_arg (def_stmt, 1);
3238 if (TREE_CODE (arg1) == REAL_CST)
3240 REAL_VALUE_TYPE c;
3241 HOST_WIDE_INT n;
3243 c = TREE_REAL_CST (arg1);
3244 n = real_to_integer (&c);
3246 if ((n & 1) == 0)
3248 REAL_VALUE_TYPE cint;
3249 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3250 if (real_identical (&c, &cint))
3251 return true;
3255 break;
3257 default:
3258 return false;
3263 return false;