/* Statement simplification on GIMPLE.
   Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "gimple-fold.h"
/* Return true when DECL can be referenced from current unit.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to other compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to reference
        to method that was partitioned elsewhere.
        In this case we have static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred by external vtables that
        we devirtualize only during final compilation stage.
        At this time we already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return true;
  /* External flag is set, so we deal with C++ reference
     to static object from other file.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
    {
      /* Just be sure it is not big in frontend setting
         flags incorrectly.  Those variables should never
         be finalized.  */
      gcc_checking_assert (!(vnode = varpool_get_node (decl))
                           || !vnode->finalized);
      return false;
    }
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.
     ??? as observed in PR20991 for already optimized out comdat virtual functions
     we may not necessarily give up because the copy will be output elsewhere when
     corresponding vtable is output.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
    return true;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have function body and that we didn't take
         the decision to eliminate the offline copy of the function yet.
         The second is important when devirtualization happens during final
         compilation stage when making a new reference no longer makes callee
         to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
        return false;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
        return false;
    }
  return true;
}
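
/* For example (schematic, added for exposition), case 3 above can arise
   from:

     struct S { virtual int f () { return 0; } };   // S::f is COMDAT
     int g (S *p) { return p->f (); }               // devirtualized late

   If this unit already decided not to emit an offline copy of S::f, a
   new direct reference would name a symbol that no unit outputs, so the
   predicate must return false.  */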

/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.  */

tree
canonicalize_constructor_val (tree cval)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));

      if (base
          && (TREE_CODE (base) == VAR_DECL
              || TREE_CODE (base) == FUNCTION_DECL)
          && !can_refer_decl_in_current_unit_p (base))
        return NULL_TREE;
      if (base && TREE_CODE (base) == VAR_DECL)
        {
          TREE_ADDRESSABLE (base) = 1;
          if (cfun && gimple_referenced_vars (cfun))
            add_referenced_var (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
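
/* For example (schematic, added for exposition), a global initializer
   such as

     int a[4];
     int *p = &a[0] + 2;      -- DECL_INITIAL roughly (int *) &a + 8

   is rewritten by the POINTER_PLUS_EXPR case above into the
   is_gimple_min_invariant form &MEM[(void *)&a + 8B], i.e. an ADDR_EXPR
   wrapping a MEM_REF with a constant offset.  */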

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
        {
          val = canonicalize_constructor_val (val);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
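
/* For example (schematic, added for exposition):

     static const int answer = 42;   -- loads of answer fold to 42
     const int zeroed;               -- const without an initializer

   The first folds via the canonicalized DECL_INITIAL; the second folds
   to 0 through build_zero_cst, provided const_value_known_p guarantees
   the variable cannot be overridden at link or run time.  */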

/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
                              TREE_OPERAND (*t, 0),
                              TREE_OPERAND (*t, 1));
      if (tem)
        {
          TREE_THIS_VOLATILE (tem) = volatile_p;
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
          == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
          == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
         from the rhs to the lhs if is_lhs, but we don't have the
         rhs here to verify that.  Thus require strict type
         compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
                             TREE_TYPE (TREE_OPERAND
                                          (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
        return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
        {
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  return NULL_TREE;
}
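
/* For example (schematic, added for exposition), constant propagation
   can produce

     tem_1 = MEM[(int *)&a, 0];     -- i.e. *&a at offset zero

   which the "fold back MEM_REFs" step above rewrites to a plain read of
   A when volatility and types agree, while reads from constant
   aggregates fold to the constant itself via fold_const_aggregate_ref.  */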

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return unshare_expr (get_symbol_constant_value (rhs));

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
         X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
          || gimple_assign_rhs_code (stmt) == NE_EXPR)
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree op1 = gimple_assign_rhs1 (stmt);
          tree op2 = gimple_assign_rhs2 (stmt);
          tree type = TREE_TYPE (op1);

          /* Check whether the comparison operands are of the same boolean
             type as the result type is.
             Check that second operand is an integer-constant with value
             one or zero.  */
          if (TREE_CODE (op2) == INTEGER_CST
              && (integer_zerop (op2) || integer_onep (op2))
              && useless_type_conversion_p (TREE_TYPE (lhs), type))
            {
              enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
              bool is_logical_not = false;

              /* X == 0 and X != 1 is a logical-not of X
                 X == 1 and X != 0 is X  */
              if ((cmp_code == EQ_EXPR && integer_zerop (op2))
                  || (cmp_code == NE_EXPR && integer_onep (op2)))
                is_logical_not = true;

              if (is_logical_not == false)
                result = op1;
              /* Only for one-bit precision typed X the transformation
                 !X -> ~X is valid.  */
              else if (TYPE_PRECISION (type) == 1)
                result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
                                     type, op1);
              /* Otherwise we use !X -> X ^ 1.  */
              else
                result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
                                     type, op1, build_int_cst (type, 1));
            }
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
        {
          tree op0 = gimple_assign_rhs1 (stmt);
          tree tem;
          bool set = false;
          location_t cond_loc = gimple_location (stmt);

          if (COMPARISON_CLASS_P (op0))
            {
              fold_defer_overflow_warnings ();
              tem = fold_binary_loc (cond_loc,
                                     TREE_CODE (op0), TREE_TYPE (op0),
                                     TREE_OPERAND (op0, 0),
                                     TREE_OPERAND (op0, 1));
              /* This is actually a conditional expression, not a GIMPLE
                 conditional statement, however, the valid_gimple_rhs_p
                 test still applies.  */
              set = (tem && is_gimple_condexpr (tem)
                     && valid_gimple_rhs_p (tem));
              fold_undefer_overflow_warnings (set, stmt, 0);
            }
          else if (is_gimple_min_invariant (op0))
            {
              tem = op0;
              set = true;
            }
          else
            return NULL_TREE;

          if (set)
            result = fold_build3_loc (cond_loc, COND_EXPR,
                                      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
                                      gimple_assign_rhs2 (stmt),
                                      gimple_assign_rhs3 (stmt));
        }

      if (!result)
        result = fold_ternary_loc (loc, subcode,
                                   TREE_TYPE (gimple_assign_lhs (stmt)),
                                   gimple_assign_rhs1 (stmt),
                                   gimple_assign_rhs2 (stmt),
                                   gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
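
/* For example (schematic, added for exposition), for boolean-typed X
   the GIMPLE_BINARY_RHS canonicalization above produces

     tem_1 = x_2 == 0;   =>   tem_1 = ~x_2;      -- one-bit precision
     tem_1 = y_3 == 0;   =>   tem_1 = y_3 ^ 1;   -- wider boolean type

   while X == 1 and X != 0 simplify to X itself.  */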

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
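
/* For example (schematic, added for exposition), after propagation a
   condition such as "if (a_1 > a_1)" folds to constant false for
   integral A_1; only the condition of the GIMPLE_COND is rewritten,
   and the now-dead CFG edges are left for other passes to clean up.  */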

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc();
  struct gimplify_ctx gctx;
  gimple last;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (&gctx);
  gctx.into_ssa = gimple_in_ssa_p (cfun);

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), true);
          return;
        }
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  last = NULL;
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      /* Do not insert the last stmt in this loop but remember it
         for replacing the original statement.  */
      if (last)
        {
          gsi_insert_before (si_p, last, GSI_NEW_STMT);
          gsi_next (si_p);
        }
      new_stmt = gsi_stmt (i);
      /* The replacement can expose previously unreferenced variables.  */
      if (gimple_in_ssa_p (cfun))
        find_new_referenced_vars (new_stmt);
      /* If the new statement possibly has a VUSE, update it with the exact
         SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
      last = new_stmt;
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the last.  */
  gsi_replace (si_p, last, false);
}
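
/* For example (schematic, added for exposition), when a call

     # .MEM_4 = VDEF <.MEM_3>
     memcpy (&d, &s, 8);

   is replaced by a sequence of two stores, the first store keeps .MEM_3
   as its VUSE, a fresh virtual SSA name links the two, and the last
   store reuses .MEM_4 as its VDEF, so no downstream virtual operands
   need updating.  */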

/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
        else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
          {
            tree op2 = gimple_assign_rhs2 (def_stmt);
            tree op3 = gimple_assign_rhs3 (def_stmt);
            return get_maxval_strlen (op2, length, visited, type)
                   && get_maxval_strlen (op3, length, visited, type);
          }
        return false;

      case GIMPLE_PHI:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
          unsigned i;

          for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
            {
              tree arg = gimple_phi_arg (def_stmt, i)->def;

              /* If this PHI has itself as an argument, we cannot
                 determine the string length of this argument.  However,
                 if we can find a constant string length for the other
                 PHI args then we can still be sure that this is a
                 constant string length.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == gimple_phi_result (def_stmt))
                continue;

              if (!get_maxval_strlen (arg, length, visited, type))
                return false;
            }
        }
        return true;

      default:
        return false;
    }
}
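
/* For example (schematic, added for exposition), for the PHI

     arg_1 = PHI <"ab", "wxyz">

   a TYPE == 1 query returns the maximum string length 4, whereas a
   TYPE == 0 query fails because the two lengths 2 and 4 differ.  */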

/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (CONVERT_EXPR_P (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (loc, callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (loc, callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2], ignore,
                                           DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
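
/* For example (schematic, added for exposition), when the source length
   is known,

     strcpy (d_1, "abc");   =>   memcpy (d_1, "abc", 4);

   via fold_builtin_strcpy; likewise strlen of a known string folds to
   its constant length, and the _CHK variants can drop their checking
   when the maximum value of the length argument provably fits.  */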

/* Generate code adjusting the first parameter of a call statement determined
   by GSI by DELTA.  */

void
gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
{
  gimple call_stmt = gsi_stmt (*gsi);
  tree parm, tmp;
  gimple new_stmt;

  delta = convert_to_ptrofftype (delta);
  gcc_assert (gimple_call_num_args (call_stmt) >= 1);
  parm = gimple_call_arg (call_stmt, 0);
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
  tmp = create_tmp_var (TREE_TYPE (parm), NULL);
  add_referenced_var (tmp);

  tmp = make_ssa_name (tmp, NULL);
  new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
  SSA_NAME_DEF_STMT (tmp) = new_stmt;
  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
  gimple_call_set_arg (call_stmt, 0, tmp);
}
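
/* For example (schematic, added for exposition), for a call
   f (this_2(D), ...) this emits

     tmp_1 = this_2(D) + delta;     -- POINTER_PLUS_EXPR
     f (tmp_1, ...);

   substituting the adjusted pointer for the original first argument.  */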

/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
        break;
      if (offset < 0)
        return NULL_TREE;

      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        {
          if (TREE_CODE (fld) != FIELD_DECL)
            continue;

          pos = int_bit_position (fld);
          size = tree_low_cst (DECL_SIZE (fld), 1);
          if (pos <= offset && (pos + size) > offset)
            break;
        }
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
        return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  else
    return binfo;
}
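
/* For example (schematic, added for exposition), for

     struct S { virtual int f (); };   -- S has no bases
     static S s;

   an OBJ_TYPE_REF carrying &s yields the binfo of S, which is safe to
   use because it has no base binfos; with ancestors involved NULL_TREE
   is returned instead.  */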

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
        tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
        if (tmp)
          {
            gimple_call_set_arg (stmt, i, tmp);
            changed = true;
          }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
        {
          gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
          changed = true;
        }
      else
        {
          tree obj = OBJ_TYPE_REF_OBJECT (callee);
          tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
          if (binfo)
            {
              HOST_WIDE_INT token
                = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
              tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
              if (fndecl)
                {
                  gimple_call_set_fndecl (stmt, fndecl);
                  changed = true;
                }
            }
        }
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
        {
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          changed = true;
        }
    }

  return changed;
}

/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;
  gimple_stmt_iterator gsinext = *gsi;
  gimple next_stmt;

  gsi_next (&gsinext);
  next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops = gimple_num_ops (stmt);
        enum tree_code subcode = gimple_assign_rhs_code (stmt);
        tree lhs = gimple_assign_lhs (stmt);
        tree new_rhs;
        /* First canonicalize operand order.  This avoids building new
           trees if this is the only thing fold would later do.  */
        if ((commutative_tree_code (subcode)
             || commutative_ternary_tree_code (subcode))
            && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                     gimple_assign_rhs2 (stmt), false))
          {
            tree tem = gimple_assign_rhs1 (stmt);
            gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
            gimple_assign_set_rhs2 (stmt, tem);
            changed = true;
          }
        new_rhs = fold_gimple_assign (gsi);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
        size_t noutputs;
        const char **oconstraints;
        const char *constraint;
        bool allows_mem, allows_reg;

        noutputs = gimple_asm_noutputs (stmt);
        oconstraints = XALLOCAVEC (const char *, noutputs);
        for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
          {
            tree link = gimple_asm_output_op (stmt, i);
            tree op = TREE_VALUE (link);
            oconstraints[i]
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, true)) != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
        for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
          {
            tree link = gimple_asm_input_op (stmt, i);
            tree op = TREE_VALUE (link);
            constraint
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
                   != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree val = gimple_debug_bind_get_value (stmt);
          if (val
              && REFERENCE_CLASS_P (val))
            {
              tree tem = maybe_fold_reference (val, false);
              if (tem)
                {
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
          else if (val
                   && TREE_CODE (val) == ADDR_EXPR)
            {
              tree ref = TREE_OPERAND (val, 0);
              tree tem = maybe_fold_reference (ref, false);
              if (tem)
                {
                  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
        }
      break;

    default:;
    }

  /* If stmt folds into nothing and it was the last stmt in a bb,
     don't call gsi_stmt.  */
  if (gsi_end_p (*gsi))
    {
      gcc_assert (next_stmt == NULL);
      return changed;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  Don't do this if stmt folded into nothing,
     as we'd be changing the next stmt.  */
  if (gimple_has_lhs (stmt) && stmt != next_stmt)
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  return changed;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}

/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
        return expr;
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (TREE_CODE (expr),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
}
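
/* For example (schematic, added for exposition), canonicalize_bool
   (x_1, true) for an SSA name X_1 yields the boolean comparison
   x_1 == 0, and inverting a comparison such as a_2 < b_3 yields
   a_2 >= b_3 via invert_tree_comparison.  */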

/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
        return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
        {
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
        }
    }
  return false;
}

/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
                                 TREE_OPERAND (op2, 0),
                                 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
                                 TREE_OPERAND (op1, 0),
                                 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}

/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b);

/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
                                  invert_tree_comparison (code2, false),
                                  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}

/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2,
                                  op2a,
                                  op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
         inner1 AND (inner1 OR inner2) => inner1
         !inner1 AND (inner1 AND inner2) => false
         !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (t AND inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner2;
              else if (integer_zerop (t))
                return boolean_false_node;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b)))  */
          else if (integer_onep (t))
            return boolean_true_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t)  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner1;
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
                 (x AND x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial)  */
          else
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                     (x OR x) == x.  */
                  if (integer_zerop (partial))
                    return t;
                  else if (integer_zerop (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}

/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1, code2,
                                    boolean_type_node, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    boolean_type_node, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
         return that or FALSE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
              else
                return boolean_false_node;
            }
        }
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
              else
                return boolean_false_node;
            }
        }

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp != 0); break;
            case NE_EXPR: val = (cmp == 0); break;
            case LT_EXPR: val = (cmp >= 0); break;
            case GT_EXPR: val = (cmp <= 0); break;
            case LE_EXPR: val = (cmp > 0); break;
            case GE_EXPR: val = (cmp < 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == NE_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp <= 0); break;
            case GT_EXPR: val = (cmp >= 0); break;
            case LE_EXPR: val = (cmp < 0); break;
            case GE_EXPR: val = (cmp > 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LE_EXPR && code2 == GE_EXPR)
                   || (code1 == GE_EXPR && code2 == LE_EXPR)))
        return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_false_node;
      else if (cmp >= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ANDed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
                        {
                          if (!result)
                            result = boolean_false_node;
                          else if (!integer_zerop (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.
                         See PR49073.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = and_var_with_comparison (arg, invert, code2,
                                                      op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }

        default:
          break;
        }
    }
  return NULL_TREE;
}
1936 /* Try to simplify the AND of two comparisons, specified by
1937 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1938 If this can be simplified to a single expression (without
1939 introducing more SSA variables to hold intermediate values),
1940 return the resulting tree. Otherwise return NULL_TREE.
1941 If the result expression is non-null, it has boolean type. */
1943 tree
1944 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1945 enum tree_code code2, tree op2a, tree op2b)
1947 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1948 if (t)
1949 return t;
1950 else
1951 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
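/* Example (a sketch; x and type are hypothetical trees): for the GIMPLE
   predicate (x < 10) & (x < 20),

     maybe_fold_and_comparisons (LT_EXPR, x, build_int_cst (type, 10),
                                 LT_EXPR, x, build_int_cst (type, 20))

   compares the two constants (cmp < 0), takes the "more restrictive"
   branch in and_comparisons_1, and returns the tree for x < 10. */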
1954 /* Helper function for or_comparisons_1: try to simplify the OR of the
1955 SSA variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1956 If INVERT is true, invert the value of VAR before doing the OR.
1957 Return NULL_TREE if we can't simplify this to a single expression. */
1959 static tree
1960 or_var_with_comparison (tree var, bool invert,
1961 enum tree_code code2, tree op2a, tree op2b)
1963 tree t;
1964 gimple stmt = SSA_NAME_DEF_STMT (var);
1966 /* We can only deal with variables whose definitions are assignments. */
1967 if (!is_gimple_assign (stmt))
1968 return NULL_TREE;
1970 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1971 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1972 Then we only have to consider the simpler non-inverted cases. */
1973 if (invert)
1974 t = and_var_with_comparison_1 (stmt,
1975 invert_tree_comparison (code2, false),
1976 op2a, op2b);
1977 else
1978 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1979 return canonicalize_bool (t, invert);
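/* A sketch of the inversion above (hypothetical operands): for
   !v | (x < 5) we instead fold v & (x >= 5), by calling
   and_var_with_comparison_1 with the inverted comparison GE_EXPR;
   canonicalize_bool then inverts the folded result, so only the
   non-inverted cases need dedicated handling below. */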
1982 /* Try to simplify the OR of the SSA variable defined by the assignment
1983 STMT with the comparison specified by (OP2A CODE2 OP2B).
1984 Return NULL_TREE if we can't simplify this to a single expression. */
1986 static tree
1987 or_var_with_comparison_1 (gimple stmt,
1988 enum tree_code code2, tree op2a, tree op2b)
1990 tree var = gimple_assign_lhs (stmt);
1991 tree true_test_var = NULL_TREE;
1992 tree false_test_var = NULL_TREE;
1993 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1995 /* Check for identities like (var OR (var != 0)) => var, (var OR (var == 0)) => true. */
1996 if (TREE_CODE (op2a) == SSA_NAME
1997 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1999 if ((code2 == NE_EXPR && integer_zerop (op2b))
2000 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2002 true_test_var = op2a;
2003 if (var == true_test_var)
2004 return var;
2006 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2007 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2009 false_test_var = op2a;
2010 if (var == false_test_var)
2011 return boolean_true_node;
2015 /* If the definition is a comparison, recurse on it. */
2016 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2018 tree t = or_comparisons_1 (innercode,
2019 gimple_assign_rhs1 (stmt),
2020 gimple_assign_rhs2 (stmt),
2021 code2,
2022 op2a,
2023 op2b);
2024 if (t)
2025 return t;
2028 /* If the definition is an AND or OR expression, we may be able to
2029 simplify by reassociating. */
2030 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2031 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2033 tree inner1 = gimple_assign_rhs1 (stmt);
2034 tree inner2 = gimple_assign_rhs2 (stmt);
2035 gimple s;
2036 tree t;
2037 tree partial = NULL_TREE;
2038 bool is_or = (innercode == BIT_IOR_EXPR);
2040 /* Check for boolean identities that don't require recursive examination
2041 of inner1/inner2:
2042 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2043 inner1 OR (inner1 AND inner2) => inner1
2044 !inner1 OR (inner1 OR inner2) => true
2045 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2047 if (inner1 == true_test_var)
2048 return (is_or ? var : inner1);
2049 else if (inner2 == true_test_var)
2050 return (is_or ? var : inner2);
2051 else if (inner1 == false_test_var)
2052 return (is_or
2053 ? boolean_true_node
2054 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2055 else if (inner2 == false_test_var)
2056 return (is_or
2057 ? boolean_true_node
2058 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2060 /* Next, redistribute/reassociate the OR across the inner tests.
2061 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2062 if (TREE_CODE (inner1) == SSA_NAME
2063 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2064 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2065 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2066 gimple_assign_rhs1 (s),
2067 gimple_assign_rhs2 (s),
2068 code2, op2a, op2b)))
2070 /* Handle the OR case, where we are reassociating:
2071 (inner1 OR inner2) OR (op2a code2 op2b)
2072 => (t OR inner2)
2073 If the partial result t is a constant, we win. Otherwise
2074 continue on to try reassociating with the other inner test. */
2075 if (is_or)
2077 if (integer_onep (t))
2078 return boolean_true_node;
2079 else if (integer_zerop (t))
2080 return inner2;
2083 /* Handle the AND case, where we are redistributing:
2084 (inner1 AND inner2) OR (op2a code2 op2b)
2085 => (t AND (inner2 OR (op2a code op2b))) */
2086 else if (integer_zerop (t))
2087 return boolean_false_node;
2089 /* Save partial result for later. */
2090 partial = t;
2093 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2094 if (TREE_CODE (inner2) == SSA_NAME
2095 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2096 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2097 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2098 gimple_assign_rhs1 (s),
2099 gimple_assign_rhs2 (s),
2100 code2, op2a, op2b)))
2102 /* Handle the OR case, where we are reassociating:
2103 (inner1 OR inner2) OR (op2a code2 op2b)
2104 => (inner1 OR t)
2105 => (t OR partial) */
2106 if (is_or)
2108 if (integer_zerop (t))
2109 return inner1;
2110 else if (integer_onep (t))
2111 return boolean_true_node;
2112 /* If both are the same, we can apply the identity
2113 (x OR x) == x. */
2114 else if (partial && same_bool_result_p (t, partial))
2115 return t;
2118 /* Handle the AND case, where we are redistributing:
2119 (inner1 AND inner2) OR (op2a code2 op2b)
2120 => (t AND (inner1 OR (op2a code2 op2b)))
2121 => (t AND partial) */
2122 else
2124 if (integer_zerop (t))
2125 return boolean_false_node;
2126 else if (partial)
2128 /* We already got a simplification for the other
2129 operand to the redistributed AND expression. The
2130 interesting case is when at least one is true.
2131 Or, if both are the same, we can apply the identity
2132 (x AND x) == x. */
2133 if (integer_onep (partial))
2134 return t;
2135 else if (integer_onep (t))
2136 return partial;
2137 else if (same_bool_result_p (t, partial))
2138 return t;
2143 return NULL_TREE;
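/* A sketch of the reassociation above (hypothetical operands): for
   var = (x < 5) | inner2, folding var | (x >= 5) computes the first
   partial result t = maybe_fold_or_comparisons (LT_EXPR, x, 5,
   GE_EXPR, x, 5), which is boolean_true_node, so the whole OR folds
   to true regardless of inner2. */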
2146 /* Try to simplify the OR of two comparisons defined by
2147 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2148 If this can be done without constructing an intermediate value,
2149 return the resulting tree; otherwise NULL_TREE is returned.
2150 This function is deliberately asymmetric as it recurses on SSA_DEFs
2151 in the first comparison but not the second. */
2153 static tree
2154 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2155 enum tree_code code2, tree op2a, tree op2b)
2157 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2158 if (operand_equal_p (op1a, op2a, 0)
2159 && operand_equal_p (op1b, op2b, 0))
2161 /* Result will be either NULL_TREE, or a combined comparison. */
2162 tree t = combine_comparisons (UNKNOWN_LOCATION,
2163 TRUTH_ORIF_EXPR, code1, code2,
2164 boolean_type_node, op1a, op1b);
2165 if (t)
2166 return t;
2169 /* Likewise the swapped case of the above. */
2170 if (operand_equal_p (op1a, op2b, 0)
2171 && operand_equal_p (op1b, op2a, 0))
2173 /* Result will be either NULL_TREE, or a combined comparison. */
2174 tree t = combine_comparisons (UNKNOWN_LOCATION,
2175 TRUTH_ORIF_EXPR, code1,
2176 swap_tree_comparison (code2),
2177 boolean_type_node, op1a, op1b);
2178 if (t)
2179 return t;
2182 /* If both comparisons are of the same value against constants, we might
2183 be able to merge them. */
2184 if (operand_equal_p (op1a, op2a, 0)
2185 && TREE_CODE (op1b) == INTEGER_CST
2186 && TREE_CODE (op2b) == INTEGER_CST)
2188 int cmp = tree_int_cst_compare (op1b, op2b);
2190 /* If we have (op1a != op1b), we should either be able to
2191 return that or TRUE, depending on whether the constant op1b
2192 also satisfies the other comparison against op2b. */
2193 if (code1 == NE_EXPR)
2195 bool done = true;
2196 bool val;
2197 switch (code2)
2199 case EQ_EXPR: val = (cmp == 0); break;
2200 case NE_EXPR: val = (cmp != 0); break;
2201 case LT_EXPR: val = (cmp < 0); break;
2202 case GT_EXPR: val = (cmp > 0); break;
2203 case LE_EXPR: val = (cmp <= 0); break;
2204 case GE_EXPR: val = (cmp >= 0); break;
2205 default: done = false;
2207 if (done)
2209 if (val)
2210 return boolean_true_node;
2211 else
2212 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2215 /* Likewise if the second comparison is a != comparison. */
2216 else if (code2 == NE_EXPR)
2218 bool done = true;
2219 bool val;
2220 switch (code1)
2222 case EQ_EXPR: val = (cmp == 0); break;
2223 case NE_EXPR: val = (cmp != 0); break;
2224 case LT_EXPR: val = (cmp > 0); break;
2225 case GT_EXPR: val = (cmp < 0); break;
2226 case LE_EXPR: val = (cmp >= 0); break;
2227 case GE_EXPR: val = (cmp <= 0); break;
2228 default: done = false;
2230 if (done)
2232 if (val)
2233 return boolean_true_node;
2234 else
2235 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2239 /* See if an equality test is redundant with the other comparison. */
2240 else if (code1 == EQ_EXPR)
2242 bool val;
2243 switch (code2)
2245 case EQ_EXPR: val = (cmp == 0); break;
2246 case NE_EXPR: val = (cmp != 0); break;
2247 case LT_EXPR: val = (cmp < 0); break;
2248 case GT_EXPR: val = (cmp > 0); break;
2249 case LE_EXPR: val = (cmp <= 0); break;
2250 case GE_EXPR: val = (cmp >= 0); break;
2251 default:
2252 val = false;
2254 if (val)
2255 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2257 else if (code2 == EQ_EXPR)
2259 bool val;
2260 switch (code1)
2262 case EQ_EXPR: val = (cmp == 0); break;
2263 case NE_EXPR: val = (cmp != 0); break;
2264 case LT_EXPR: val = (cmp > 0); break;
2265 case GT_EXPR: val = (cmp < 0); break;
2266 case LE_EXPR: val = (cmp >= 0); break;
2267 case GE_EXPR: val = (cmp <= 0); break;
2268 default:
2269 val = false;
2271 if (val)
2272 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2275 /* Choose the less restrictive of two < or <= comparisons. */
2276 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2277 && (code2 == LT_EXPR || code2 == LE_EXPR))
2279 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2280 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2281 else
2282 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2285 /* Likewise choose the less restrictive of two > or >= comparisons. */
2286 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2287 && (code2 == GT_EXPR || code2 == GE_EXPR))
2289 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2290 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2291 else
2292 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2295 /* Check for singleton ranges. */
2296 else if (cmp == 0
2297 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2298 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2299 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2301 /* Check for less/greater pairs that don't restrict the range at all. */
2302 else if (cmp >= 0
2303 && (code1 == LT_EXPR || code1 == LE_EXPR)
2304 && (code2 == GT_EXPR || code2 == GE_EXPR))
2305 return boolean_true_node;
2306 else if (cmp <= 0
2307 && (code1 == GT_EXPR || code1 == GE_EXPR)
2308 && (code2 == LT_EXPR || code2 == LE_EXPR))
2309 return boolean_true_node;
2312 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2313 NAME's definition is a truth value. See if there are any simplifications
2314 that can be done against the NAME's definition. */
2315 if (TREE_CODE (op1a) == SSA_NAME
2316 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2317 && (integer_zerop (op1b) || integer_onep (op1b)))
2319 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2320 || (code1 == NE_EXPR && integer_onep (op1b)));
2321 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2322 switch (gimple_code (stmt))
2324 case GIMPLE_ASSIGN:
2325 /* Try to simplify by copy-propagating the definition. */
2326 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2328 case GIMPLE_PHI:
2329 /* If every argument to the PHI produces the same result when
2330 ORed with the second comparison, we win.
2331 Do not do this unless the type is bool since we need a bool
2332 result here anyway. */
2333 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2335 tree result = NULL_TREE;
2336 unsigned i;
2337 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2339 tree arg = gimple_phi_arg_def (stmt, i);
2341 /* If this PHI has itself as an argument, ignore it.
2342 If all the other args produce the same result,
2343 we're still OK. */
2344 if (arg == gimple_phi_result (stmt))
2345 continue;
2346 else if (TREE_CODE (arg) == INTEGER_CST)
2348 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2350 if (!result)
2351 result = boolean_true_node;
2352 else if (!integer_onep (result))
2353 return NULL_TREE;
2355 else if (!result)
2356 result = fold_build2 (code2, boolean_type_node,
2357 op2a, op2b);
2358 else if (!same_bool_comparison_p (result,
2359 code2, op2a, op2b))
2360 return NULL_TREE;
2362 else if (TREE_CODE (arg) == SSA_NAME
2363 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2365 tree temp;
2366 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2367 /* In simple cases we can look through PHI nodes,
2368 but we have to be careful with loops.
2369 See PR49073. */
2370 if (! dom_info_available_p (CDI_DOMINATORS)
2371 || gimple_bb (def_stmt) == gimple_bb (stmt)
2372 || dominated_by_p (CDI_DOMINATORS,
2373 gimple_bb (def_stmt),
2374 gimple_bb (stmt)))
2375 return NULL_TREE;
2376 temp = or_var_with_comparison (arg, invert, code2,
2377 op2a, op2b);
2378 if (!temp)
2379 return NULL_TREE;
2380 else if (!result)
2381 result = temp;
2382 else if (!same_bool_result_p (result, temp))
2383 return NULL_TREE;
2385 else
2386 return NULL_TREE;
2388 return result;
2391 default:
2392 break;
2395 return NULL_TREE;
2398 /* Try to simplify the OR of two comparisons, specified by
2399 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2400 If this can be simplified to a single expression (without
2401 introducing more SSA variables to hold intermediate values),
2402 return the resulting tree. Otherwise return NULL_TREE.
2403 If the result expression is non-null, it has boolean type. */
2405 tree
2406 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2407 enum tree_code code2, tree op2a, tree op2b)
2409 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2410 if (t)
2411 return t;
2412 else
2413 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
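/* Example (a sketch; x is a hypothetical tree): (x > 7) | (x < 10) is
   a less/greater pair that leaves no gap, so or_comparisons_1 folds it
   to boolean_true_node via the range checks above; the second, swapped
   call catches simplifications that only fire when the roles of the
   two comparisons are exchanged. */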
2417 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2419 Either NULL_TREE, a simplified but non-constant expression, or a
2420 constant is returned.
2422 ??? This should go into a gimple-fold-inline.h file to be eventually
2423 privatized with the single valueize function used in the various TUs
2424 to avoid the indirect function call overhead. */
2426 tree
2427 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2429 location_t loc = gimple_location (stmt);
2430 switch (gimple_code (stmt))
2432 case GIMPLE_ASSIGN:
2434 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2436 switch (get_gimple_rhs_class (subcode))
2438 case GIMPLE_SINGLE_RHS:
2440 tree rhs = gimple_assign_rhs1 (stmt);
2441 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2443 if (TREE_CODE (rhs) == SSA_NAME)
2445 /* If the RHS is an SSA_NAME, return its known constant value,
2446 if any. */
2447 return (*valueize) (rhs);
2449 /* Handle propagating invariant addresses into address
2450 operations. */
2451 else if (TREE_CODE (rhs) == ADDR_EXPR
2452 && !is_gimple_min_invariant (rhs))
2454 HOST_WIDE_INT offset;
2455 tree base;
2456 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2457 &offset,
2458 valueize);
2459 if (base
2460 && (CONSTANT_CLASS_P (base)
2461 || decl_address_invariant_p (base)))
2462 return build_invariant_address (TREE_TYPE (rhs),
2463 base, offset);
2465 else if (TREE_CODE (rhs) == CONSTRUCTOR
2466 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2467 && (CONSTRUCTOR_NELTS (rhs)
2468 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2470 unsigned i;
2471 tree val, list;
2473 list = NULL_TREE;
2474 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2476 val = (*valueize) (val);
2477 if (TREE_CODE (val) == INTEGER_CST
2478 || TREE_CODE (val) == REAL_CST
2479 || TREE_CODE (val) == FIXED_CST)
2480 list = tree_cons (NULL_TREE, val, list);
2481 else
2482 return NULL_TREE;
2485 return build_vector (TREE_TYPE (rhs), nreverse (list));
2488 if (kind == tcc_reference)
2490 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2491 || TREE_CODE (rhs) == REALPART_EXPR
2492 || TREE_CODE (rhs) == IMAGPART_EXPR)
2493 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2495 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2496 return fold_unary_loc (EXPR_LOCATION (rhs),
2497 TREE_CODE (rhs),
2498 TREE_TYPE (rhs), val);
2500 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2501 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2503 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2504 return fold_ternary_loc (EXPR_LOCATION (rhs),
2505 TREE_CODE (rhs),
2506 TREE_TYPE (rhs), val,
2507 TREE_OPERAND (rhs, 1),
2508 TREE_OPERAND (rhs, 2));
2510 else if (TREE_CODE (rhs) == MEM_REF
2511 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2513 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2514 if (TREE_CODE (val) == ADDR_EXPR
2515 && is_gimple_min_invariant (val))
2517 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2518 unshare_expr (val),
2519 TREE_OPERAND (rhs, 1));
2520 if (tem)
2521 rhs = tem;
2524 return fold_const_aggregate_ref_1 (rhs, valueize);
2526 else if (kind == tcc_declaration)
2527 return get_symbol_constant_value (rhs);
2528 return rhs;
2531 case GIMPLE_UNARY_RHS:
2533 /* Handle unary operators that can appear in GIMPLE form.
2534 Note that we know the single operand must be a constant,
2535 so this should almost always return a simplified RHS. */
2536 tree lhs = gimple_assign_lhs (stmt);
2537 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2539 /* Conversions are useless for CCP purposes if they are
2540 value-preserving. Thus the restrictions that
2541 useless_type_conversion_p places for restrict qualification
2542 of pointer types should not apply here.
2543 Substitution later will only substitute to allowed places. */
2544 if (CONVERT_EXPR_CODE_P (subcode)
2545 && POINTER_TYPE_P (TREE_TYPE (lhs))
2546 && POINTER_TYPE_P (TREE_TYPE (op0))
2547 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2548 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2549 && TYPE_MODE (TREE_TYPE (lhs))
2550 == TYPE_MODE (TREE_TYPE (op0)))
2551 return op0;
2553 return
2554 fold_unary_ignore_overflow_loc (loc, subcode,
2555 gimple_expr_type (stmt), op0);
2558 case GIMPLE_BINARY_RHS:
2560 /* Handle binary operators that can appear in GIMPLE form. */
2561 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2562 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2564 /* Translate &x + CST into an invariant form suitable for
2565 further propagation. */
2566 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2567 && TREE_CODE (op0) == ADDR_EXPR
2568 && TREE_CODE (op1) == INTEGER_CST)
2570 tree off = fold_convert (ptr_type_node, op1);
2571 return build_fold_addr_expr_loc
2572 (loc,
2573 fold_build2 (MEM_REF,
2574 TREE_TYPE (TREE_TYPE (op0)),
2575 unshare_expr (op0), off));
2578 return fold_binary_loc (loc, subcode,
2579 gimple_expr_type (stmt), op0, op1);
2582 case GIMPLE_TERNARY_RHS:
2584 /* Handle ternary operators that can appear in GIMPLE form. */
2585 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2586 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2587 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2589 /* Fold embedded expressions in ternary codes. */
2590 if ((subcode == COND_EXPR
2591 || subcode == VEC_COND_EXPR)
2592 && COMPARISON_CLASS_P (op0))
2594 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2595 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2596 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2597 TREE_TYPE (op0), op00, op01);
2598 if (tem)
2599 op0 = tem;
2602 return fold_ternary_loc (loc, subcode,
2603 gimple_expr_type (stmt), op0, op1, op2);
2606 default:
2607 gcc_unreachable ();
2611 case GIMPLE_CALL:
2613 tree fn;
2615 if (gimple_call_internal_p (stmt))
2616 /* No folding yet for these functions. */
2617 return NULL_TREE;
2619 fn = (*valueize) (gimple_call_fn (stmt));
2620 if (TREE_CODE (fn) == ADDR_EXPR
2621 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2622 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2624 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2625 tree call, retval;
2626 unsigned i;
2627 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2628 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2629 call = build_call_array_loc (loc,
2630 gimple_call_return_type (stmt),
2631 fn, gimple_call_num_args (stmt), args);
2632 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2633 if (retval)
2634 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2635 STRIP_NOPS (retval);
2636 return retval;
2638 return NULL_TREE;
2641 default:
2642 return NULL_TREE;
2646 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2647 Returns NULL_TREE if folding to a constant is not possible, otherwise
2648 returns a constant according to is_gimple_min_invariant. */
2650 tree
2651 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2653 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2654 if (res && is_gimple_min_invariant (res))
2655 return res;
2656 return NULL_TREE;
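/* A minimal sketch of a valueize callback (identity_valueize is a
   hypothetical helper, not part of this file): a caller with no
   lattice can pass an identity function, in which case only statements
   whose operands are already constant will fold:

     static tree
     identity_valueize (tree t)
     {
       return t;
     }

     ... gimple_fold_stmt_to_constant (stmt, identity_valueize) ...  */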
2660 /* The following functions fold references using
2661 their constant initializers. */
2663 static tree fold_ctor_reference (tree type, tree ctor,
2664 unsigned HOST_WIDE_INT offset,
2665 unsigned HOST_WIDE_INT size);
2667 /* See if we can find the constructor defining the value of BASE.
2668 When we know the constructor at a constant offset (such as when
2669 BASE is array[40] and we know the constructor of array), then
2670 BIT_OFFSET is adjusted accordingly.
2672 As a special case, return error_mark_node when the constructor
2673 is not explicitly available but is known to be zero, as for
2674 'static const int a;'. */
2675 static tree
2676 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2677 tree (*valueize)(tree))
2679 HOST_WIDE_INT bit_offset2, size, max_size;
2680 if (TREE_CODE (base) == MEM_REF)
2682 if (!integer_zerop (TREE_OPERAND (base, 1)))
2684 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2685 return NULL_TREE;
2686 *bit_offset += (mem_ref_offset (base).low
2687 * BITS_PER_UNIT);
2690 if (valueize
2691 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2692 base = valueize (TREE_OPERAND (base, 0));
2693 if (!base || TREE_CODE (base) != ADDR_EXPR)
2694 return NULL_TREE;
2695 base = TREE_OPERAND (base, 0);
2698 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2699 DECL_INITIAL. If BASE is a nested reference into another
2700 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2701 the inner reference. */
2702 switch (TREE_CODE (base))
2704 case VAR_DECL:
2705 if (!const_value_known_p (base))
2706 return NULL_TREE;
2708 /* Fallthru. */
2709 case CONST_DECL:
2710 if (!DECL_INITIAL (base)
2711 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2712 return error_mark_node;
2713 /* Do not return an error_mark_node DECL_INITIAL. LTO uses this
2714 as a special marker (_not_ zero ...) for its own purposes. */
2715 if (DECL_INITIAL (base) == error_mark_node)
2716 return NULL_TREE;
2717 return DECL_INITIAL (base);
2719 case ARRAY_REF:
2720 case COMPONENT_REF:
2721 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2722 if (max_size == -1 || size != max_size)
2723 return NULL_TREE;
2724 *bit_offset += bit_offset2;
2725 return get_base_constructor (base, bit_offset, valueize);
2727 case STRING_CST:
2728 case CONSTRUCTOR:
2729 return base;
2731 default:
2732 return NULL_TREE;
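/* For example (sketch): for a load from MEM_REF[&a + 4], the MEM_REF
   case above adds 4 * BITS_PER_UNIT to *BIT_OFFSET and strips the
   ADDR_EXPR, so the caller can fold the access against a's
   DECL_INITIAL at the adjusted bit position. */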
2736 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
2737 to the memory at bit OFFSET.
2739 We only do the simple job of folding single-byte accesses. */
2741 static tree
2742 fold_string_cst_ctor_reference (tree type, tree ctor,
2743 unsigned HOST_WIDE_INT offset,
2744 unsigned HOST_WIDE_INT size)
2746 if (INTEGRAL_TYPE_P (type)
2747 && (TYPE_MODE (type)
2748 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2749 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2750 == MODE_INT)
2751 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2752 && size == BITS_PER_UNIT
2753 && !(offset % BITS_PER_UNIT))
2755 offset /= BITS_PER_UNIT;
2756 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2757 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2758 [offset]));
2759 /* Folding
2760 const char a[20]="hello";
2761 return a[10];
2763 might lead to an offset greater than the string length. In this case
2764 we know the value is either initialized to 0 or the access is out of
2765 bounds. Return 0 in both cases. */
2766 return build_zero_cst (type);
2768 return NULL_TREE;
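/* For example (sketch): a one-byte read at bit offset 8 from a "hello"
   initializer passes the checks above (a byte-mode integral type,
   byte-aligned, exactly one unit wide) and folds to 'e'; wider or
   misaligned reads return NULL_TREE. */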
2771 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE and size
2772 SIZE to the memory at bit OFFSET. */
2774 static tree
2775 fold_array_ctor_reference (tree type, tree ctor,
2776 unsigned HOST_WIDE_INT offset,
2777 unsigned HOST_WIDE_INT size)
2779 unsigned HOST_WIDE_INT cnt;
2780 tree cfield, cval;
2781 double_int low_bound, elt_size;
2782 double_int index, max_index;
2783 double_int access_index;
2784 tree domain_type = NULL_TREE;
2785 HOST_WIDE_INT inner_offset;
2787 /* Compute low bound and elt size. */
2788 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2789 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2790 if (domain_type && TYPE_MIN_VALUE (domain_type))
2792 /* Static constructors for variably sized objects make no sense. */
2793 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2794 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2796 else
2797 low_bound = double_int_zero;
2798 /* Static constructors for variably sized objects make no sense. */
2799 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2800 == INTEGER_CST);
2801 elt_size =
2802 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2805 /* We can handle only constant-sized accesses that are known not to
2806 be larger than the size of an array element. */
2807 if (!TYPE_SIZE_UNIT (type)
2808 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2809 || double_int_cmp (elt_size,
2810 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2811 return NULL_TREE;
2813 /* Compute the array index we look for. */
2814 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2815 elt_size, TRUNC_DIV_EXPR);
2816 access_index = double_int_add (access_index, low_bound);
2818 /* And offset within the access. */
2819 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2821 /* See if the array element is large enough to span the whole access. We
2822 do not care to fold accesses spanning multiple array indexes. */
2823 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
2824 return NULL_TREE;
2826 index = double_int_sub (low_bound, double_int_one);
2827 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2829 /* An array constructor may explicitly set the index, specify a range,
2830 or leave the index NULL, meaning that it is the next index after the
2831 previous one. */
2832 if (cfield)
2834 if (TREE_CODE (cfield) == INTEGER_CST)
2835 max_index = index = tree_to_double_int (cfield);
2836 else
2838 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2839 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2840 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2843 else
2844 max_index = index = double_int_add (index, double_int_one);
2846 /* Do we have a match? */
2847 if (double_int_cmp (access_index, index, 1) >= 0
2848 && double_int_cmp (access_index, max_index, 1) <= 0)
2849 return fold_ctor_reference (type, cval, inner_offset, size);
2851 /* When the memory is not explicitly mentioned in the constructor,
2852 it is 0 (or out of range). */
2853 return build_zero_cst (type);
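/* A worked example of the index arithmetic above (hypothetical access,
   assuming BITS_PER_UNIT == 8): a 32-bit read at bit OFFSET 64 from
   int a[10] with 4-byte elements and low bound 0 gives

     access_index = (64 / 8) / 4 + 0 = 2
     inner_offset = 64 % (4 * 8)     = 0

   so the access folds through the constructor element for a[2]. */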
2856 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
2857 Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2859 static tree
2860 fold_nonarray_ctor_reference (tree type, tree ctor,
2861 unsigned HOST_WIDE_INT offset,
2862 unsigned HOST_WIDE_INT size)
2864 unsigned HOST_WIDE_INT cnt;
2865 tree cfield, cval;
2867 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2868 cval)
2870 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2871 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2872 tree field_size = DECL_SIZE (cfield);
2873 double_int bitoffset;
2874 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2875 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
2876 double_int bitoffset_end, access_end;
2878 /* Variably sized objects in static constructors make no sense,
2879 but field_size can be NULL for flexible array members. */
2880 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2881 && TREE_CODE (byte_offset) == INTEGER_CST
2882 && (field_size != NULL_TREE
2883 ? TREE_CODE (field_size) == INTEGER_CST
2884 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2886 /* Compute bit offset of the field. */
2887 bitoffset = double_int_add (tree_to_double_int (field_offset),
2888 double_int_mul (byte_offset_cst,
2889 bits_per_unit_cst));
2890 /* Compute bit offset where the field ends. */
2891 if (field_size != NULL_TREE)
2892 bitoffset_end = double_int_add (bitoffset,
2893 tree_to_double_int (field_size));
2894 else
2895 bitoffset_end = double_int_zero;
2897 access_end = double_int_add (uhwi_to_double_int (offset),
2898 uhwi_to_double_int (size));
2900 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2901 [BITOFFSET, BITOFFSET_END)? */
2902 if (double_int_cmp (access_end, bitoffset, 0) > 0
2903 && (field_size == NULL_TREE
2904 || double_int_cmp (uhwi_to_double_int (offset),
2905 bitoffset_end, 0) < 0))
2907 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2908 bitoffset);
2909 /* We do have overlap. Now see if the field is large enough to
2910 cover the access. Give up for accesses spanning multiple
2911 fields. */
2912 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2913 return NULL_TREE;
2914 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
2915 return NULL_TREE;
2916 return fold_ctor_reference (type, cval,
2917 double_int_to_uhwi (inner_offset), size);
2920 /* When the memory is not explicitly mentioned in the constructor, it is 0. */
2921 return build_zero_cst (type);
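/* A worked example of the overlap test above (hypothetical layout:
   32-bit int, 16-bit short, no padding between them): for
   struct { int i; short s; } v = { 1, 2 };  a 16-bit read at bit
   OFFSET 32 has access_end = 48, while field s spans
   [bitoffset, bitoffset_end) = [32, 48); the ranges coincide, so the
   read folds to the initializer 2. */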
2924 /* CTOR is a value initializing memory; fold a reference of type TYPE and
2925 size SIZE to the memory at bit OFFSET. */
2927 static tree
2928 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2929 unsigned HOST_WIDE_INT size)
2931 tree ret;
2933 /* We found the field with an exact match. */
2934 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2935 && !offset)
2936 return canonicalize_constructor_val (ctor);
2938 /* We are at the end of the walk; see if we can view-convert the
2939 result. */
2940 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2941 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2942 && operand_equal_p (TYPE_SIZE (type),
2943 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2945 ret = canonicalize_constructor_val (ctor);
2946 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2947 if (ret)
2948 STRIP_NOPS (ret);
2949 return ret;
2951 if (TREE_CODE (ctor) == STRING_CST)
2952 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2953 if (TREE_CODE (ctor) == CONSTRUCTOR)
2956 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2957 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2958 return fold_array_ctor_reference (type, ctor, offset, size);
2959 else
2960 return fold_nonarray_ctor_reference (type, ctor, offset, size);
2963 return NULL_TREE;
2966 /* Return the tree representing the element referenced by T if T is an
2967 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
2968 names using VALUEIZE. Return NULL_TREE otherwise. */
2970 tree
2971 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2973 tree ctor, idx, base;
2974 HOST_WIDE_INT offset, size, max_size;
2975 tree tem;
2977 if (TREE_THIS_VOLATILE (t))
2978 return NULL_TREE;
2980 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2981 return get_symbol_constant_value (t);
2983 tem = fold_read_from_constant_string (t);
2984 if (tem)
2985 return tem;
2987 switch (TREE_CODE (t))
2989 case ARRAY_REF:
2990 case ARRAY_RANGE_REF:
2991 /* Constant indexes are handled well by get_base_constructor.
2992 We only special-case variable offsets here.
2993 FIXME: This code can't handle nested references with variable indexes
2994 (they will be handled only by iteration of ccp). Perhaps we can bring
2995 get_ref_base_and_extent here and make it use a valueize callback. */
2996 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
2997 && valueize
2998 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
2999 && host_integerp (idx, 0))
3001 tree low_bound, unit_size;
3003 /* If the resulting bit-offset is constant, track it. */
3004 if ((low_bound = array_ref_low_bound (t),
3005 host_integerp (low_bound, 0))
3006 && (unit_size = array_ref_element_size (t),
3007 host_integerp (unit_size, 1)))
3009 offset = TREE_INT_CST_LOW (idx);
3010 offset -= TREE_INT_CST_LOW (low_bound);
3011 offset *= TREE_INT_CST_LOW (unit_size);
3012 offset *= BITS_PER_UNIT;
3014 base = TREE_OPERAND (t, 0);
3015 ctor = get_base_constructor (base, &offset, valueize);
3016 /* Empty constructor. Always fold to 0. */
3017 if (ctor == error_mark_node)
3018 return build_zero_cst (TREE_TYPE (t));
3019 /* Out-of-bounds array access. The value is undefined,
3020 but don't fold. */
3021 if (offset < 0)
3022 return NULL_TREE;
3023 /* We cannot determine the ctor. */
3024 if (!ctor)
3025 return NULL_TREE;
3026 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3027 TREE_INT_CST_LOW (unit_size)
3028 * BITS_PER_UNIT);
3031 /* Fallthru. */
3033 case COMPONENT_REF:
3034 case BIT_FIELD_REF:
3035 case TARGET_MEM_REF:
3036 case MEM_REF:
3037 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3038 ctor = get_base_constructor (base, &offset, valueize);
3040 /* Empty constructor. Always fold to 0. */
3041 if (ctor == error_mark_node)
3042 return build_zero_cst (TREE_TYPE (t));
3043 /* We do not know the precise address. */
3044 if (max_size == -1 || max_size != size)
3045 return NULL_TREE;
3046 /* We cannot determine the ctor. */
3047 if (!ctor)
3048 return NULL_TREE;
3050 /* Out-of-bounds array access. The value is undefined, but don't fold. */
3051 if (offset < 0)
3052 return NULL_TREE;
3054 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3056 case REALPART_EXPR:
3057 case IMAGPART_EXPR:
3059 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3060 if (c && TREE_CODE (c) == COMPLEX_CST)
3061 return fold_build1_loc (EXPR_LOCATION (t),
3062 TREE_CODE (t), TREE_TYPE (t), c);
3063 break;
3066 default:
3067 break;
3070 return NULL_TREE;
3073 tree
3074 fold_const_aggregate_ref (tree t)
3076 return fold_const_aggregate_ref_1 (t, NULL);
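/* For example (sketch): given  static const int a[2] = { 1, 2 };
   fold_const_aggregate_ref on the ARRAY_REF a[1] finds the
   CONSTRUCTOR via get_base_constructor and returns the INTEGER_CST 2.
   With no valueize callback, variable indexes are left alone. */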
3079 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3080 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3081 KNOWN_BINFO carries the binfo describing the true type of
3082 OBJ_TYPE_REF_OBJECT(REF). */
3084 tree
3085 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3087 unsigned HOST_WIDE_INT offset, size;
3088 tree v, fn;
3090 v = BINFO_VTABLE (known_binfo);
3091 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
3092 if (!v)
3093 return NULL_TREE;
3095 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3097 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3098 v = TREE_OPERAND (v, 0);
3100 else
3101 offset = 0;
3103 if (TREE_CODE (v) != ADDR_EXPR)
3104 return NULL_TREE;
3105 v = TREE_OPERAND (v, 0);
3107 if (TREE_CODE (v) != VAR_DECL
3108 || !DECL_VIRTUAL_P (v)
3109 || !DECL_INITIAL (v)
3110 || DECL_INITIAL (v) == error_mark_node)
3111 return NULL_TREE;
3112 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3113 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3114 offset += token * size;
3115 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3116 offset, size);
3117 if (!fn || integer_zerop (fn))
3118 return NULL_TREE;
3119 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3120 || TREE_CODE (fn) == FDESC_EXPR);
3121 fn = TREE_OPERAND (fn, 0);
3122 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3124 /* When the cgraph node is missing and the function is not public, we
3125 cannot devirtualize. This can happen in WHOPR when the actual method
3126 ends up in another partition, because we found the devirtualization
3127 possibility too late. */
3128 if (!can_refer_decl_in_current_unit_p (fn))
3129 return NULL_TREE;
3131 return fn;
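/* A sketch of the slot arithmetic above (hypothetical numbers,
   assuming BITS_PER_UNIT == 8 and 64-bit vtable slots, so size = 64):
   if BINFO_VTABLE is &vtable + 16 and TOKEN is 2, then
   offset = 16 * 8 + 2 * 64 = 256 bits, and fold_ctor_reference
   extracts the method's ADDR_EXPR from the vtable's DECL_INITIAL at
   that position. */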
3134 /* Return true iff VAL is a gimple expression that is known to be
3135 non-negative. Restricted to floating-point inputs. */
3137 bool
3138 gimple_val_nonnegative_real_p (tree val)
3140 gimple def_stmt;
3142 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3144 /* Use existing logic for non-gimple trees. */
3145 if (tree_expr_nonnegative_p (val))
3146 return true;
3148 if (TREE_CODE (val) != SSA_NAME)
3149 return false;
3151 /* Currently we look only at the immediately defining statement
3152 to make this determination, since recursion on defining
3153 statements of operands can lead to quadratic behavior in the
3154 worst case. This is expected to catch almost all occurrences
3155 in practice. It would be possible to implement limited-depth
3156 recursion if important cases are lost. Alternatively, passes
3157 that need this information (such as the pow/powi lowering code
3158 in the cse_sincos pass) could be revised to provide it through
3159 dataflow propagation. */
3161 def_stmt = SSA_NAME_DEF_STMT (val);
3163 if (is_gimple_assign (def_stmt))
3165 tree op0, op1;
3167 /* See fold-const.c:tree_expr_nonnegative_p for additional
3168 cases that could be handled with recursion. */
3170 switch (gimple_assign_rhs_code (def_stmt))
3172 case ABS_EXPR:
3173 /* Always true for floating-point operands. */
3174 return true;
3176 case MULT_EXPR:
3177 /* True if the two operands are identical (since we are
3178 restricted to floating-point inputs). */
3179 op0 = gimple_assign_rhs1 (def_stmt);
3180 op1 = gimple_assign_rhs2 (def_stmt);
3182 if (op0 == op1
3183 || operand_equal_p (op0, op1, 0))
3184 return true;
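/* If the operands differ, fall through to return false. */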
3186 default:
3187 return false;
3190 else if (is_gimple_call (def_stmt))
3192 tree fndecl = gimple_call_fndecl (def_stmt);
3193 if (fndecl
3194 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3196 tree arg1;
3198 switch (DECL_FUNCTION_CODE (fndecl))
3200 CASE_FLT_FN (BUILT_IN_ACOS):
3201 CASE_FLT_FN (BUILT_IN_ACOSH):
3202 CASE_FLT_FN (BUILT_IN_CABS):
3203 CASE_FLT_FN (BUILT_IN_COSH):
3204 CASE_FLT_FN (BUILT_IN_ERFC):
3205 CASE_FLT_FN (BUILT_IN_EXP):
3206 CASE_FLT_FN (BUILT_IN_EXP10):
3207 CASE_FLT_FN (BUILT_IN_EXP2):
3208 CASE_FLT_FN (BUILT_IN_FABS):
3209 CASE_FLT_FN (BUILT_IN_FDIM):
3210 CASE_FLT_FN (BUILT_IN_HYPOT):
3211 CASE_FLT_FN (BUILT_IN_POW10):
3212 return true;
3214 CASE_FLT_FN (BUILT_IN_SQRT):
3215 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3216 negative inputs. */
3217 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3218 return true;
3220 break;
3222 CASE_FLT_FN (BUILT_IN_POWI):
3223 /* True if the second argument is an even integer. */
3224 arg1 = gimple_call_arg (def_stmt, 1);
3226 if (TREE_CODE (arg1) == INTEGER_CST
3227 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3228 return true;
3230 break;
3232 CASE_FLT_FN (BUILT_IN_POW):
3233 /* True if the second argument is an even integer-valued
3234 real. */
3235 arg1 = gimple_call_arg (def_stmt, 1);
3237 if (TREE_CODE (arg1) == REAL_CST)
3239 REAL_VALUE_TYPE c;
3240 HOST_WIDE_INT n;
3242 c = TREE_REAL_CST (arg1);
3243 n = real_to_integer (&c);
3245 if ((n & 1) == 0)
3247 REAL_VALUE_TYPE cint;
3248 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3249 if (real_identical (&c, &cint))
3250 return true;
3254 break;
3256 default:
3257 return false;
3262 return false;
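/* Examples (sketch): x * x is accepted by the MULT_EXPR case above;
   __builtin_pow (y, 2.0) is accepted because 2.0 is an even
   integer-valued real; __builtin_sqrt (y) is accepted only when signed
   zeros are not honored (e.g. with -fno-signed-zeros), since
   sqrt (-0.0) yields -0.0. */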