/* gcc/gimple-fold.c -- from the official GCC repository.
   Historical revision associated with PR bootstrap/51346 (LTO bootstrap
   failed with bootstrap-profiled).  */
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
33 #include "gimple-fold.h"
35 /* Return true when DECL can be referenced from current unit.
36 We can get declarations that are not possible to reference for
37 various reasons:
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
44 set.
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsehwere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
49 declaring the body.
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final copmilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
54 directly. */
56 static bool
57 can_refer_decl_in_current_unit_p (tree decl)
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
62 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
63 return true;
64 /* External flag is set, so we deal with C++ reference
65 to static object from other file. */
66 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
67 && TREE_CODE (decl) == VAR_DECL)
69 /* Just be sure it is not big in frontend setting
70 flags incorrectly. Those variables should never
71 be finalized. */
72 gcc_checking_assert (!(vnode = varpool_get_node (decl))
73 || !vnode->finalized);
74 return false;
76 /* When function is public, we always can introduce new reference.
77 Exception are the COMDAT functions where introducing a direct
78 reference imply need to include function body in the curren tunit. */
79 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
80 return true;
81 /* We are not at ltrans stage; so don't worry about WHOPR.
82 Also when still gimplifying all referred comdat functions will be
83 produced.
84 ??? as observed in PR20991 for already optimized out comdat virtual functions
85 we may not neccesarily give up because the copy will be output elsewhere when
86 corresponding vtable is output. */
87 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
88 return true;
89 /* If we already output the function body, we are safe. */
90 if (TREE_ASM_WRITTEN (decl))
91 return true;
92 if (TREE_CODE (decl) == FUNCTION_DECL)
94 node = cgraph_get_node (decl);
95 /* Check that we still have function body and that we didn't took
96 the decision to eliminate offline copy of the function yet.
97 The second is important when devirtualization happens during final
98 compilation stage when making a new reference no longer makes callee
99 to be compiled. */
100 if (!node || !node->analyzed || node->global.inlined_to)
101 return false;
103 else if (TREE_CODE (decl) == VAR_DECL)
105 vnode = varpool_get_node (decl);
106 if (!vnode || !vnode->finalized)
107 return false;
109 return true;
112 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
113 acceptable form for is_gimple_min_invariant. */
115 tree
116 canonicalize_constructor_val (tree cval)
118 STRIP_NOPS (cval);
119 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
120 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
122 tree ptr = TREE_OPERAND (cval, 0);
123 if (is_gimple_min_invariant (ptr))
124 cval = build1_loc (EXPR_LOCATION (cval),
125 ADDR_EXPR, TREE_TYPE (ptr),
126 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
127 ptr,
128 fold_convert (ptr_type_node,
129 TREE_OPERAND (cval, 1))));
131 if (TREE_CODE (cval) == ADDR_EXPR)
133 tree base = get_base_address (TREE_OPERAND (cval, 0));
135 if (base
136 && (TREE_CODE (base) == VAR_DECL
137 || TREE_CODE (base) == FUNCTION_DECL)
138 && !can_refer_decl_in_current_unit_p (base))
139 return NULL_TREE;
140 if (base && TREE_CODE (base) == VAR_DECL)
142 TREE_ADDRESSABLE (base) = 1;
143 if (cfun && gimple_referenced_vars (cfun))
144 add_referenced_var (base);
146 /* Fixup types in global initializers. */
147 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
148 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
150 return cval;
153 /* If SYM is a constant variable with known value, return the value.
154 NULL_TREE is returned otherwise. */
156 tree
157 get_symbol_constant_value (tree sym)
159 if (const_value_known_p (sym))
161 tree val = DECL_INITIAL (sym);
162 if (val)
164 val = canonicalize_constructor_val (val);
165 if (val && is_gimple_min_invariant (val))
166 return val;
167 else
168 return NULL_TREE;
170 /* Variables declared 'const' without an initializer
171 have zero as the initializer if they may not be
172 overridden at link or run time. */
173 if (!val
174 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
175 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
176 return build_zero_cst (TREE_TYPE (sym));
179 return NULL_TREE;
184 /* Subroutine of fold_stmt. We perform several simplifications of the
185 memory reference tree EXPR and make sure to re-gimplify them properly
186 after propagation of constant addresses. IS_LHS is true if the
187 reference is supposed to be an lvalue. */
189 static tree
190 maybe_fold_reference (tree expr, bool is_lhs)
192 tree *t = &expr;
193 tree result;
195 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
196 || TREE_CODE (expr) == REALPART_EXPR
197 || TREE_CODE (expr) == IMAGPART_EXPR)
198 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
199 return fold_unary_loc (EXPR_LOCATION (expr),
200 TREE_CODE (expr),
201 TREE_TYPE (expr),
202 TREE_OPERAND (expr, 0));
203 else if (TREE_CODE (expr) == BIT_FIELD_REF
204 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
205 return fold_ternary_loc (EXPR_LOCATION (expr),
206 TREE_CODE (expr),
207 TREE_TYPE (expr),
208 TREE_OPERAND (expr, 0),
209 TREE_OPERAND (expr, 1),
210 TREE_OPERAND (expr, 2));
212 while (handled_component_p (*t))
213 t = &TREE_OPERAND (*t, 0);
215 /* Canonicalize MEM_REFs invariant address operand. Do this first
216 to avoid feeding non-canonical MEM_REFs elsewhere. */
217 if (TREE_CODE (*t) == MEM_REF
218 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
220 bool volatile_p = TREE_THIS_VOLATILE (*t);
221 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
222 TREE_OPERAND (*t, 0),
223 TREE_OPERAND (*t, 1));
224 if (tem)
226 TREE_THIS_VOLATILE (tem) = volatile_p;
227 *t = tem;
228 tem = maybe_fold_reference (expr, is_lhs);
229 if (tem)
230 return tem;
231 return expr;
235 if (!is_lhs
236 && (result = fold_const_aggregate_ref (expr))
237 && is_gimple_min_invariant (result))
238 return result;
240 /* Fold back MEM_REFs to reference trees. */
241 if (TREE_CODE (*t) == MEM_REF
242 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
243 && integer_zerop (TREE_OPERAND (*t, 1))
244 && (TREE_THIS_VOLATILE (*t)
245 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
246 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
247 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
248 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
249 /* We have to look out here to not drop a required conversion
250 from the rhs to the lhs if is_lhs, but we don't have the
251 rhs here to verify that. Thus require strict type
252 compatibility. */
253 && types_compatible_p (TREE_TYPE (*t),
254 TREE_TYPE (TREE_OPERAND
255 (TREE_OPERAND (*t, 0), 0))))
257 tree tem;
258 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
259 tem = maybe_fold_reference (expr, is_lhs);
260 if (tem)
261 return tem;
262 return expr;
264 else if (TREE_CODE (*t) == TARGET_MEM_REF)
266 tree tem = maybe_fold_tmr (*t);
267 if (tem)
269 *t = tem;
270 tem = maybe_fold_reference (expr, is_lhs);
271 if (tem)
272 return tem;
273 return expr;
277 return NULL_TREE;
281 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
282 replacement rhs for the statement or NULL_TREE if no simplification
283 could be made. It is assumed that the operands have been previously
284 folded. */
286 static tree
287 fold_gimple_assign (gimple_stmt_iterator *si)
289 gimple stmt = gsi_stmt (*si);
290 enum tree_code subcode = gimple_assign_rhs_code (stmt);
291 location_t loc = gimple_location (stmt);
293 tree result = NULL_TREE;
295 switch (get_gimple_rhs_class (subcode))
297 case GIMPLE_SINGLE_RHS:
299 tree rhs = gimple_assign_rhs1 (stmt);
301 if (REFERENCE_CLASS_P (rhs))
302 return maybe_fold_reference (rhs, false);
304 else if (TREE_CODE (rhs) == ADDR_EXPR)
306 tree ref = TREE_OPERAND (rhs, 0);
307 tree tem = maybe_fold_reference (ref, true);
308 if (tem
309 && TREE_CODE (tem) == MEM_REF
310 && integer_zerop (TREE_OPERAND (tem, 1)))
311 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
312 else if (tem)
313 result = fold_convert (TREE_TYPE (rhs),
314 build_fold_addr_expr_loc (loc, tem));
315 else if (TREE_CODE (ref) == MEM_REF
316 && integer_zerop (TREE_OPERAND (ref, 1)))
317 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
320 else if (TREE_CODE (rhs) == CONSTRUCTOR
321 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
322 && (CONSTRUCTOR_NELTS (rhs)
323 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
325 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
326 unsigned i;
327 tree val;
329 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
330 if (TREE_CODE (val) != INTEGER_CST
331 && TREE_CODE (val) != REAL_CST
332 && TREE_CODE (val) != FIXED_CST)
333 return NULL_TREE;
335 return build_vector_from_ctor (TREE_TYPE (rhs),
336 CONSTRUCTOR_ELTS (rhs));
339 else if (DECL_P (rhs))
340 return unshare_expr (get_symbol_constant_value (rhs));
342 /* If we couldn't fold the RHS, hand over to the generic
343 fold routines. */
344 if (result == NULL_TREE)
345 result = fold (rhs);
347 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
348 that may have been added by fold, and "useless" type
349 conversions that might now be apparent due to propagation. */
350 STRIP_USELESS_TYPE_CONVERSION (result);
352 if (result != rhs && valid_gimple_rhs_p (result))
353 return result;
355 return NULL_TREE;
357 break;
359 case GIMPLE_UNARY_RHS:
361 tree rhs = gimple_assign_rhs1 (stmt);
363 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
364 if (result)
366 /* If the operation was a conversion do _not_ mark a
367 resulting constant with TREE_OVERFLOW if the original
368 constant was not. These conversions have implementation
369 defined behavior and retaining the TREE_OVERFLOW flag
370 here would confuse later passes such as VRP. */
371 if (CONVERT_EXPR_CODE_P (subcode)
372 && TREE_CODE (result) == INTEGER_CST
373 && TREE_CODE (rhs) == INTEGER_CST)
374 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
376 STRIP_USELESS_TYPE_CONVERSION (result);
377 if (valid_gimple_rhs_p (result))
378 return result;
381 break;
383 case GIMPLE_BINARY_RHS:
384 /* Try to canonicalize for boolean-typed X the comparisons
385 X == 0, X == 1, X != 0, and X != 1. */
386 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
387 || gimple_assign_rhs_code (stmt) == NE_EXPR)
389 tree lhs = gimple_assign_lhs (stmt);
390 tree op1 = gimple_assign_rhs1 (stmt);
391 tree op2 = gimple_assign_rhs2 (stmt);
392 tree type = TREE_TYPE (op1);
394 /* Check whether the comparison operands are of the same boolean
395 type as the result type is.
396 Check that second operand is an integer-constant with value
397 one or zero. */
398 if (TREE_CODE (op2) == INTEGER_CST
399 && (integer_zerop (op2) || integer_onep (op2))
400 && useless_type_conversion_p (TREE_TYPE (lhs), type))
402 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
403 bool is_logical_not = false;
405 /* X == 0 and X != 1 is a logical-not.of X
406 X == 1 and X != 0 is X */
407 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
408 || (cmp_code == NE_EXPR && integer_onep (op2)))
409 is_logical_not = true;
411 if (is_logical_not == false)
412 result = op1;
413 /* Only for one-bit precision typed X the transformation
414 !X -> ~X is valied. */
415 else if (TYPE_PRECISION (type) == 1)
416 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
417 type, op1);
418 /* Otherwise we use !X -> X ^ 1. */
419 else
420 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
421 type, op1, build_int_cst (type, 1));
426 if (!result)
427 result = fold_binary_loc (loc, subcode,
428 TREE_TYPE (gimple_assign_lhs (stmt)),
429 gimple_assign_rhs1 (stmt),
430 gimple_assign_rhs2 (stmt));
432 if (result)
434 STRIP_USELESS_TYPE_CONVERSION (result);
435 if (valid_gimple_rhs_p (result))
436 return result;
438 break;
440 case GIMPLE_TERNARY_RHS:
441 /* Try to fold a conditional expression. */
442 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
444 tree op0 = gimple_assign_rhs1 (stmt);
445 tree tem;
446 bool set = false;
447 location_t cond_loc = gimple_location (stmt);
449 if (COMPARISON_CLASS_P (op0))
451 fold_defer_overflow_warnings ();
452 tem = fold_binary_loc (cond_loc,
453 TREE_CODE (op0), TREE_TYPE (op0),
454 TREE_OPERAND (op0, 0),
455 TREE_OPERAND (op0, 1));
456 /* This is actually a conditional expression, not a GIMPLE
457 conditional statement, however, the valid_gimple_rhs_p
458 test still applies. */
459 set = (tem && is_gimple_condexpr (tem)
460 && valid_gimple_rhs_p (tem));
461 fold_undefer_overflow_warnings (set, stmt, 0);
463 else if (is_gimple_min_invariant (op0))
465 tem = op0;
466 set = true;
468 else
469 return NULL_TREE;
471 if (set)
472 result = fold_build3_loc (cond_loc, COND_EXPR,
473 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
474 gimple_assign_rhs2 (stmt),
475 gimple_assign_rhs3 (stmt));
478 if (!result)
479 result = fold_ternary_loc (loc, subcode,
480 TREE_TYPE (gimple_assign_lhs (stmt)),
481 gimple_assign_rhs1 (stmt),
482 gimple_assign_rhs2 (stmt),
483 gimple_assign_rhs3 (stmt));
485 if (result)
487 STRIP_USELESS_TYPE_CONVERSION (result);
488 if (valid_gimple_rhs_p (result))
489 return result;
491 break;
493 case GIMPLE_INVALID_RHS:
494 gcc_unreachable ();
497 return NULL_TREE;
500 /* Attempt to fold a conditional statement. Return true if any changes were
501 made. We only attempt to fold the condition expression, and do not perform
502 any transformation that would require alteration of the cfg. It is
503 assumed that the operands have been previously folded. */
505 static bool
506 fold_gimple_cond (gimple stmt)
508 tree result = fold_binary_loc (gimple_location (stmt),
509 gimple_cond_code (stmt),
510 boolean_type_node,
511 gimple_cond_lhs (stmt),
512 gimple_cond_rhs (stmt));
514 if (result)
516 STRIP_USELESS_TYPE_CONVERSION (result);
517 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
519 gimple_cond_set_condition_from_tree (stmt, result);
520 return true;
524 return false;
527 /* Convert EXPR into a GIMPLE value suitable for substitution on the
528 RHS of an assignment. Insert the necessary statements before
529 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
530 is replaced. If the call is expected to produces a result, then it
531 is replaced by an assignment of the new RHS to the result variable.
532 If the result is to be ignored, then the call is replaced by a
533 GIMPLE_NOP. A proper VDEF chain is retained by making the first
534 VUSE and the last VDEF of the whole sequence be the same as the replaced
535 statement and using new SSA names for stores in between. */
537 void
538 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
540 tree lhs;
541 gimple stmt, new_stmt;
542 gimple_stmt_iterator i;
543 gimple_seq stmts = gimple_seq_alloc();
544 struct gimplify_ctx gctx;
545 gimple last;
546 gimple laststore;
547 tree reaching_vuse;
549 stmt = gsi_stmt (*si_p);
551 gcc_assert (is_gimple_call (stmt));
553 push_gimplify_context (&gctx);
554 gctx.into_ssa = gimple_in_ssa_p (cfun);
556 lhs = gimple_call_lhs (stmt);
557 if (lhs == NULL_TREE)
559 gimplify_and_add (expr, &stmts);
560 /* We can end up with folding a memcpy of an empty class assignment
561 which gets optimized away by C++ gimplification. */
562 if (gimple_seq_empty_p (stmts))
564 pop_gimplify_context (NULL);
565 if (gimple_in_ssa_p (cfun))
567 unlink_stmt_vdef (stmt);
568 release_defs (stmt);
570 gsi_remove (si_p, true);
571 return;
574 else
576 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
577 new_stmt = gimple_build_assign (lhs, tmp);
578 i = gsi_last (stmts);
579 gsi_insert_after_without_update (&i, new_stmt,
580 GSI_CONTINUE_LINKING);
583 pop_gimplify_context (NULL);
585 if (gimple_has_location (stmt))
586 annotate_all_with_location (stmts, gimple_location (stmt));
588 /* First iterate over the replacement statements backward, assigning
589 virtual operands to their defining statements. */
590 laststore = NULL;
591 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
593 new_stmt = gsi_stmt (i);
594 if (gimple_assign_single_p (new_stmt)
595 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
597 tree vdef;
598 if (!laststore)
599 vdef = gimple_vdef (stmt);
600 else
601 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
602 gimple_set_vdef (new_stmt, vdef);
603 if (TREE_CODE (vdef) == SSA_NAME)
604 SSA_NAME_DEF_STMT (vdef) = new_stmt;
605 laststore = new_stmt;
609 /* Second iterate over the statements forward, assigning virtual
610 operands to their uses. */
611 last = NULL;
612 reaching_vuse = gimple_vuse (stmt);
613 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
615 /* Do not insert the last stmt in this loop but remember it
616 for replacing the original statement. */
617 if (last)
619 gsi_insert_before (si_p, last, GSI_NEW_STMT);
620 gsi_next (si_p);
622 new_stmt = gsi_stmt (i);
623 /* The replacement can expose previously unreferenced variables. */
624 if (gimple_in_ssa_p (cfun))
625 find_new_referenced_vars (new_stmt);
626 /* If the new statement possibly has a VUSE, update it with exact SSA
627 name we know will reach this one. */
628 if (gimple_has_mem_ops (new_stmt))
629 gimple_set_vuse (new_stmt, reaching_vuse);
630 gimple_set_modified (new_stmt, true);
631 if (gimple_vdef (new_stmt))
632 reaching_vuse = gimple_vdef (new_stmt);
633 last = new_stmt;
636 /* If the new sequence does not do a store release the virtual
637 definition of the original statement. */
638 if (reaching_vuse
639 && reaching_vuse == gimple_vuse (stmt))
641 tree vdef = gimple_vdef (stmt);
642 if (vdef
643 && TREE_CODE (vdef) == SSA_NAME)
645 unlink_stmt_vdef (stmt);
646 release_ssa_name (vdef);
650 /* Finally replace rhe original statement with the last. */
651 gsi_replace (si_p, last, false);
654 /* Return the string length, maximum string length or maximum value of
655 ARG in LENGTH.
656 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
657 is not NULL and, for TYPE == 0, its value is not equal to the length
658 we determine or if we are unable to determine the length or value,
659 return false. VISITED is a bitmap of visited variables.
660 TYPE is 0 if string length should be returned, 1 for maximum string
661 length and 2 for maximum value ARG can have. */
663 static bool
664 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
666 tree var, val;
667 gimple def_stmt;
669 if (TREE_CODE (arg) != SSA_NAME)
671 if (TREE_CODE (arg) == COND_EXPR)
672 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
673 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
674 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
675 else if (TREE_CODE (arg) == ADDR_EXPR
676 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
677 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
679 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
680 if (TREE_CODE (aop0) == INDIRECT_REF
681 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
682 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
683 length, visited, type);
686 if (type == 2)
688 val = arg;
689 if (TREE_CODE (val) != INTEGER_CST
690 || tree_int_cst_sgn (val) < 0)
691 return false;
693 else
694 val = c_strlen (arg, 1);
695 if (!val)
696 return false;
698 if (*length)
700 if (type > 0)
702 if (TREE_CODE (*length) != INTEGER_CST
703 || TREE_CODE (val) != INTEGER_CST)
704 return false;
706 if (tree_int_cst_lt (*length, val))
707 *length = val;
708 return true;
710 else if (simple_cst_equal (val, *length) != 1)
711 return false;
714 *length = val;
715 return true;
718 /* If we were already here, break the infinite cycle. */
719 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
720 return true;
722 var = arg;
723 def_stmt = SSA_NAME_DEF_STMT (var);
725 switch (gimple_code (def_stmt))
727 case GIMPLE_ASSIGN:
728 /* The RHS of the statement defining VAR must either have a
729 constant length or come from another SSA_NAME with a constant
730 length. */
731 if (gimple_assign_single_p (def_stmt)
732 || gimple_assign_unary_nop_p (def_stmt))
734 tree rhs = gimple_assign_rhs1 (def_stmt);
735 return get_maxval_strlen (rhs, length, visited, type);
737 return false;
739 case GIMPLE_PHI:
741 /* All the arguments of the PHI node must have the same constant
742 length. */
743 unsigned i;
745 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
747 tree arg = gimple_phi_arg (def_stmt, i)->def;
749 /* If this PHI has itself as an argument, we cannot
750 determine the string length of this argument. However,
751 if we can find a constant string length for the other
752 PHI args then we can still be sure that this is a
753 constant string length. So be optimistic and just
754 continue with the next argument. */
755 if (arg == gimple_phi_result (def_stmt))
756 continue;
758 if (!get_maxval_strlen (arg, length, visited, type))
759 return false;
762 return true;
764 default:
765 return false;
770 /* Fold builtin call in statement STMT. Returns a simplified tree.
771 We may return a non-constant expression, including another call
772 to a different function and with different arguments, e.g.,
773 substituting memcpy for strcpy when the string length is known.
774 Note that some builtins expand into inline code that may not
775 be valid in GIMPLE. Callers must take care. */
777 tree
778 gimple_fold_builtin (gimple stmt)
780 tree result, val[3];
781 tree callee, a;
782 int arg_idx, type;
783 bitmap visited;
784 bool ignore;
785 int nargs;
786 location_t loc = gimple_location (stmt);
788 gcc_assert (is_gimple_call (stmt));
790 ignore = (gimple_call_lhs (stmt) == NULL);
792 /* First try the generic builtin folder. If that succeeds, return the
793 result directly. */
794 result = fold_call_stmt (stmt, ignore);
795 if (result)
797 if (ignore)
798 STRIP_NOPS (result);
799 return result;
802 /* Ignore MD builtins. */
803 callee = gimple_call_fndecl (stmt);
804 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
805 return NULL_TREE;
807 /* Give up for always_inline inline builtins until they are
808 inlined. */
809 if (avoid_folding_inline_builtin (callee))
810 return NULL_TREE;
812 /* If the builtin could not be folded, and it has no argument list,
813 we're done. */
814 nargs = gimple_call_num_args (stmt);
815 if (nargs == 0)
816 return NULL_TREE;
818 /* Limit the work only for builtins we know how to simplify. */
819 switch (DECL_FUNCTION_CODE (callee))
821 case BUILT_IN_STRLEN:
822 case BUILT_IN_FPUTS:
823 case BUILT_IN_FPUTS_UNLOCKED:
824 arg_idx = 0;
825 type = 0;
826 break;
827 case BUILT_IN_STRCPY:
828 case BUILT_IN_STRNCPY:
829 arg_idx = 1;
830 type = 0;
831 break;
832 case BUILT_IN_MEMCPY_CHK:
833 case BUILT_IN_MEMPCPY_CHK:
834 case BUILT_IN_MEMMOVE_CHK:
835 case BUILT_IN_MEMSET_CHK:
836 case BUILT_IN_STRNCPY_CHK:
837 arg_idx = 2;
838 type = 2;
839 break;
840 case BUILT_IN_STRCPY_CHK:
841 case BUILT_IN_STPCPY_CHK:
842 arg_idx = 1;
843 type = 1;
844 break;
845 case BUILT_IN_SNPRINTF_CHK:
846 case BUILT_IN_VSNPRINTF_CHK:
847 arg_idx = 1;
848 type = 2;
849 break;
850 default:
851 return NULL_TREE;
854 if (arg_idx >= nargs)
855 return NULL_TREE;
857 /* Try to use the dataflow information gathered by the CCP process. */
858 visited = BITMAP_ALLOC (NULL);
859 bitmap_clear (visited);
861 memset (val, 0, sizeof (val));
862 a = gimple_call_arg (stmt, arg_idx);
863 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
864 val[arg_idx] = NULL_TREE;
866 BITMAP_FREE (visited);
868 result = NULL_TREE;
869 switch (DECL_FUNCTION_CODE (callee))
871 case BUILT_IN_STRLEN:
872 if (val[0] && nargs == 1)
874 tree new_val =
875 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
877 /* If the result is not a valid gimple value, or not a cast
878 of a valid gimple value, then we cannot use the result. */
879 if (is_gimple_val (new_val)
880 || (CONVERT_EXPR_P (new_val)
881 && is_gimple_val (TREE_OPERAND (new_val, 0))))
882 return new_val;
884 break;
886 case BUILT_IN_STRCPY:
887 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
888 result = fold_builtin_strcpy (loc, callee,
889 gimple_call_arg (stmt, 0),
890 gimple_call_arg (stmt, 1),
891 val[1]);
892 break;
894 case BUILT_IN_STRNCPY:
895 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
896 result = fold_builtin_strncpy (loc, callee,
897 gimple_call_arg (stmt, 0),
898 gimple_call_arg (stmt, 1),
899 gimple_call_arg (stmt, 2),
900 val[1]);
901 break;
903 case BUILT_IN_FPUTS:
904 if (nargs == 2)
905 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
906 gimple_call_arg (stmt, 1),
907 ignore, false, val[0]);
908 break;
910 case BUILT_IN_FPUTS_UNLOCKED:
911 if (nargs == 2)
912 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
913 gimple_call_arg (stmt, 1),
914 ignore, true, val[0]);
915 break;
917 case BUILT_IN_MEMCPY_CHK:
918 case BUILT_IN_MEMPCPY_CHK:
919 case BUILT_IN_MEMMOVE_CHK:
920 case BUILT_IN_MEMSET_CHK:
921 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
922 result = fold_builtin_memory_chk (loc, callee,
923 gimple_call_arg (stmt, 0),
924 gimple_call_arg (stmt, 1),
925 gimple_call_arg (stmt, 2),
926 gimple_call_arg (stmt, 3),
927 val[2], ignore,
928 DECL_FUNCTION_CODE (callee));
929 break;
931 case BUILT_IN_STRCPY_CHK:
932 case BUILT_IN_STPCPY_CHK:
933 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
934 result = fold_builtin_stxcpy_chk (loc, callee,
935 gimple_call_arg (stmt, 0),
936 gimple_call_arg (stmt, 1),
937 gimple_call_arg (stmt, 2),
938 val[1], ignore,
939 DECL_FUNCTION_CODE (callee));
940 break;
942 case BUILT_IN_STRNCPY_CHK:
943 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
944 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
945 gimple_call_arg (stmt, 1),
946 gimple_call_arg (stmt, 2),
947 gimple_call_arg (stmt, 3),
948 val[2]);
949 break;
951 case BUILT_IN_SNPRINTF_CHK:
952 case BUILT_IN_VSNPRINTF_CHK:
953 if (val[1] && is_gimple_val (val[1]))
954 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
955 DECL_FUNCTION_CODE (callee));
956 break;
958 default:
959 gcc_unreachable ();
962 if (result && ignore)
963 result = fold_ignored_result (result);
964 return result;
967 /* Generate code adjusting the first parameter of a call statement determined
968 by GSI by DELTA. */
970 void
971 gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
973 gimple call_stmt = gsi_stmt (*gsi);
974 tree parm, tmp;
975 gimple new_stmt;
977 delta = convert_to_ptrofftype (delta);
978 gcc_assert (gimple_call_num_args (call_stmt) >= 1);
979 parm = gimple_call_arg (call_stmt, 0);
980 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
981 tmp = create_tmp_var (TREE_TYPE (parm), NULL);
982 add_referenced_var (tmp);
984 tmp = make_ssa_name (tmp, NULL);
985 new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
986 SSA_NAME_DEF_STMT (tmp) = new_stmt;
987 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
988 gimple_call_set_arg (call_stmt, 0, tmp);
991 /* Return a binfo to be used for devirtualization of calls based on an object
992 represented by a declaration (i.e. a global or automatically allocated one)
993 or NULL if it cannot be found or is not safe. CST is expected to be an
994 ADDR_EXPR of such object or the function will return NULL. Currently it is
995 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
997 tree
998 gimple_extract_devirt_binfo_from_cst (tree cst)
1000 HOST_WIDE_INT offset, size, max_size;
1001 tree base, type, expected_type, binfo;
1002 bool last_artificial = false;
1004 if (!flag_devirtualize
1005 || TREE_CODE (cst) != ADDR_EXPR
1006 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1007 return NULL_TREE;
1009 cst = TREE_OPERAND (cst, 0);
1010 expected_type = TREE_TYPE (cst);
1011 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1012 type = TREE_TYPE (base);
1013 if (!DECL_P (base)
1014 || max_size == -1
1015 || max_size != size
1016 || TREE_CODE (type) != RECORD_TYPE)
1017 return NULL_TREE;
1019 /* Find the sub-object the constant actually refers to and mark whether it is
1020 an artificial one (as opposed to a user-defined one). */
1021 while (true)
1023 HOST_WIDE_INT pos, size;
1024 tree fld;
1026 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
1027 break;
1028 if (offset < 0)
1029 return NULL_TREE;
1031 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1033 if (TREE_CODE (fld) != FIELD_DECL)
1034 continue;
1036 pos = int_bit_position (fld);
1037 size = tree_low_cst (DECL_SIZE (fld), 1);
1038 if (pos <= offset && (pos + size) > offset)
1039 break;
1041 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1042 return NULL_TREE;
1044 last_artificial = DECL_ARTIFICIAL (fld);
1045 type = TREE_TYPE (fld);
1046 offset -= pos;
1048 /* Artifical sub-objects are ancestors, we do not want to use them for
1049 devirtualization, at least not here. */
1050 if (last_artificial)
1051 return NULL_TREE;
1052 binfo = TYPE_BINFO (type);
1053 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1054 return NULL_TREE;
1055 else
1056 return binfo;
1059 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1060 The statement may be replaced by another statement, e.g., if the call
1061 simplifies to a constant value. Return true if any changes were made.
1062 It is assumed that the operands have been previously folded. */
1064 static bool
1065 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1067 gimple stmt = gsi_stmt (*gsi);
1068 tree callee;
1069 bool changed = false;
1070 unsigned i;
/* Pass 1: simplify *&x references appearing as call arguments in place.  */
1072 /* Fold *& in call arguments. */
1073 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1074 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1076 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1077 if (tmp)
1079 gimple_call_set_arg (stmt, i, tmp);
1080 changed = true;
1084 /* Check for virtual calls that became direct calls. */
1085 callee = gimple_call_fn (stmt);
1086 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
/* The OBJ_TYPE_REF wraps a known function address: strip the wrapper
   and make the call direct.  */
1088 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1090 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1091 changed = true;
1093 else
/* Otherwise try to devirtualize through the static type of the object:
   if the object's vtable (binfo) is known, look up the method for the
   vtable slot TOKEN and call it directly.  */
1095 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1096 tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
1097 if (binfo)
1099 HOST_WIDE_INT token
1100 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1101 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1102 if (fndecl)
1104 gimple_call_set_fndecl (stmt, fndecl);
1105 changed = true;
1111 /* Check whether propagating into the function address made the
1112 call direct, and thus possibly non-inlineable.
1113 ??? This asks for a more conservative setting of the non-inlinable
1114 flag, namely true for all indirect calls. But that would require
1115 that we can re-compute the flag conservatively, thus it isn't
1116 ever initialized from something else than return/argument type
1117 checks . */
1118 callee = gimple_call_fndecl (stmt)/* NULL unless the call is now direct.  */;
1119 if (callee
1120 && !gimple_check_call_matching_types (stmt, callee))
1121 gimple_call_set_cannot_inline (stmt, true);
/* In-place mode must not replace STMT, so builtin folding (which may
   substitute a whole new statement) is skipped below.  */
1123 if (inplace)
1124 return changed;
1126 /* Check for builtins that CCP can handle using information not
1127 available in the generic fold routines. */
1128 if (callee && DECL_BUILT_IN (callee))
1130 tree result = gimple_fold_builtin (stmt);
1131 if (result)
/* Prefer a simple RHS replacement; fall back to re-gimplifying the
   folded tree when it is not a valid GIMPLE RHS.  */
1133 if (!update_call_from_tree (gsi, result))
1134 gimplify_and_update_call_from_tree (gsi, result);
1135 changed = true;
1139 return changed;
1142 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1143 distinguishes both cases. */
1145 static bool
1146 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1148 bool changed = false;
1149 gimple stmt = gsi_stmt (*gsi);
1150 unsigned i;
/* Remember the following statement so we can detect, after folding,
   whether STMT was removed entirely (iterator then points at NEXT_STMT).  */
1151 gimple_stmt_iterator gsinext = *gsi;
1152 gimple next_stmt;
1154 gsi_next (&gsinext);
1155 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1157 /* Fold the main computation performed by the statement. */
1158 switch (gimple_code (stmt))
1160 case GIMPLE_ASSIGN:
1162 unsigned old_num_ops = gimple_num_ops (stmt);
1163 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1164 tree lhs = gimple_assign_lhs (stmt);
1165 tree new_rhs;
1166 /* First canonicalize operand order. This avoids building new
1167 trees if this is the only thing fold would later do. */
1168 if ((commutative_tree_code (subcode)
1169 || commutative_ternary_tree_code (subcode))
1170 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1171 gimple_assign_rhs2 (stmt), false))
1173 tree tem = gimple_assign_rhs1 (stmt);
1174 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1175 gimple_assign_set_rhs2 (stmt, tem);
1176 changed = true;
1178 new_rhs = fold_gimple_assign (gsi);
/* If folding changed the type, convert back to the LHS type.  */
1179 if (new_rhs
1180 && !useless_type_conversion_p (TREE_TYPE (lhs),
1181 TREE_TYPE (new_rhs)))
1182 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
/* In-place folding may only shrink the statement, never grow its
   operand count (that would require allocating a new statement).  */
1183 if (new_rhs
1184 && (!inplace
1185 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1187 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1188 changed = true;
1190 break;
1193 case GIMPLE_COND:
1194 changed |= fold_gimple_cond (stmt);
1195 break;
1197 case GIMPLE_CALL:
1198 changed |= gimple_fold_call (gsi, inplace);
1199 break;
1201 case GIMPLE_ASM:
1202 /* Fold *& in asm operands. */
1204 size_t noutputs;
1205 const char **oconstraints;
1206 const char *constraint;
1207 bool allows_mem, allows_reg;
1209 noutputs = gimple_asm_noutputs (stmt);
1210 oconstraints = XALLOCAVEC (const char *, noutputs);
1212 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1214 tree link = gimple_asm_output_op (stmt, i);
1215 tree op = TREE_VALUE (link);
/* Collect output constraints; they are needed to parse the input
   constraints in the second loop below.  */
1216 oconstraints[i]
1217 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1218 if (REFERENCE_CLASS_P (op)
1219 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1221 TREE_VALUE (link) = op;
1222 changed = true;
1225 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1227 tree link = gimple_asm_input_op (stmt, i);
1228 tree op = TREE_VALUE (link);
1229 constraint
1230 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1231 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1232 oconstraints, &allows_mem, &allows_reg);
/* Treat the operand as an lvalue only if the constraint forces memory.  */
1233 if (REFERENCE_CLASS_P (op)
1234 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1235 != NULL_TREE)
1237 TREE_VALUE (link) = op;
1238 changed = true;
1242 break;
1244 case GIMPLE_DEBUG:
1245 if (gimple_debug_bind_p (stmt))
1247 tree val = gimple_debug_bind_get_value (stmt);
1248 if (val
1249 && REFERENCE_CLASS_P (val))
1251 tree tem = maybe_fold_reference (val, false);
1252 if (tem)
1254 gimple_debug_bind_set_value (stmt, tem);
1255 changed = true;
1259 break;
1261 default:;
1264 /* If stmt folds into nothing and it was the last stmt in a bb,
1265 don't call gsi_stmt. */
1266 if (gsi_end_p (*gsi))
1268 gcc_assert (next_stmt == NULL);
1269 return changed;
/* STMT may have been replaced by a different statement above; refetch.  */
1272 stmt = gsi_stmt (*gsi);
1274 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1275 as we'd changing the next stmt. */
1276 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1278 tree lhs = gimple_get_lhs (stmt);
1279 if (lhs && REFERENCE_CLASS_P (lhs))
1281 tree new_lhs = maybe_fold_reference (lhs, true);
1282 if (new_lhs)
1284 gimple_set_lhs (stmt, new_lhs);
1285 changed = true;
1290 return changed;
1293 /* Fold the statement pointed to by GSI. In some cases, this function may
1294 replace the whole statement with a new one. Returns true iff folding
1295 makes any changes.
1296 The statement pointed to by GSI should be in valid gimple form but may
1297 be in unfolded state as resulting from for example constant propagation
1298 which can produce *&x = 0. */
1300 bool
1301 fold_stmt (gimple_stmt_iterator *gsi)
1303 return fold_stmt_1 (gsi, false);
1306 /* Perform the minimal folding on statement *GSI. Only operations like
1307 *&x created by constant propagation are handled. The statement cannot
1308 be replaced with a new one. Return true if the statement was
1309 changed, false otherwise.
1310 The statement *GSI should be in valid gimple form but may
1311 be in unfolded state as resulting from for example constant propagation
1312 which can produce *&x = 0. */
1314 bool
1315 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1317 gimple stmt = gsi_stmt (*gsi);
1318 bool changed = fold_stmt_1 (gsi, true);
1319 gcc_assert (gsi_stmt (*gsi) == stmt);
1320 return changed;
1323 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1324 if EXPR is null or we don't know how.
1325 If non-null, the result always has boolean type. */
1327 static tree
1328 canonicalize_bool (tree expr, bool invert)
1330 if (!expr)
1331 return NULL_TREE;
1332 else if (invert)
1334 if (integer_nonzerop (expr))
1335 return boolean_false_node;
1336 else if (integer_zerop (expr))
1337 return boolean_true_node;
1338 else if (TREE_CODE (expr) == SSA_NAME)
1339 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1340 build_int_cst (TREE_TYPE (expr), 0));
1341 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1342 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1343 boolean_type_node,
1344 TREE_OPERAND (expr, 0),
1345 TREE_OPERAND (expr, 1));
1346 else
1347 return NULL_TREE;
1349 else
1351 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1352 return expr;
1353 if (integer_nonzerop (expr))
1354 return boolean_true_node;
1355 else if (integer_zerop (expr))
1356 return boolean_false_node;
1357 else if (TREE_CODE (expr) == SSA_NAME)
1358 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1359 build_int_cst (TREE_TYPE (expr), 0));
1360 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1361 return fold_build2 (TREE_CODE (expr),
1362 boolean_type_node,
1363 TREE_OPERAND (expr, 0),
1364 TREE_OPERAND (expr, 1));
1365 else
1366 return NULL_TREE;
1370 /* Check to see if a boolean expression EXPR is logically equivalent to the
1371 comparison (OP1 CODE OP2). Check for various identities involving
1372 SSA_NAMEs. */
1374 static bool
1375 same_bool_comparison_p (const_tree expr, enum tree_code code,
1376 const_tree op1, const_tree op2)
1378 gimple s;
1380 /* The obvious case. */
1381 if (TREE_CODE (expr) == code
1382 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1383 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1384 return true;
1386 /* Check for comparing (name, name != 0) and the case where expr
1387 is an SSA_NAME with a definition matching the comparison. */
1388 if (TREE_CODE (expr) == SSA_NAME
1389 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
/* A boolean NAME equals (NAME != 0) and (NAME == nonzero).  */
1391 if (operand_equal_p (expr, op1, 0))
1392 return ((code == NE_EXPR && integer_zerop (op2))
1393 || (code == EQ_EXPR && integer_nonzerop (op2)));
/* Otherwise look through EXPR's defining statement for an identical
   comparison.  */
1394 s = SSA_NAME_DEF_STMT (expr);
1395 if (is_gimple_assign (s)
1396 && gimple_assign_rhs_code (s) == code
1397 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1398 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1399 return true;
1402 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1403 of name is a comparison, recurse. */
1404 if (TREE_CODE (op1) == SSA_NAME
1405 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1407 s = SSA_NAME_DEF_STMT (op1);
1408 if (is_gimple_assign (s)
1409 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1411 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) / (name == nonzero): recurse with the same comparison.  */
1412 if ((c == NE_EXPR && integer_zerop (op2))
1413 || (c == EQ_EXPR && integer_nonzerop (op2)))
1414 return same_bool_comparison_p (expr, c,
1415 gimple_assign_rhs1 (s),
1416 gimple_assign_rhs2 (s));
/* (name == 0) / (name != nonzero): recurse with the inverted code.  */
1417 if ((c == EQ_EXPR && integer_zerop (op2))
1418 || (c == NE_EXPR && integer_nonzerop (op2)))
1419 return same_bool_comparison_p (expr,
1420 invert_tree_comparison (c, false),
1421 gimple_assign_rhs1 (s),
1422 gimple_assign_rhs2 (s));
1425 return false;
1428 /* Check to see if two boolean expressions OP1 and OP2 are logically
1429 equivalent. */
1431 static bool
1432 same_bool_result_p (const_tree op1, const_tree op2)
1434 /* Simple cases first. */
1435 if (operand_equal_p (op1, op2, 0))
1436 return true;
1438 /* Check the cases where at least one of the operands is a comparison.
1439 These are a bit smarter than operand_equal_p in that they apply some
1440 identifies on SSA_NAMEs. */
1441 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1442 && same_bool_comparison_p (op1, TREE_CODE (op2),
1443 TREE_OPERAND (op2, 0),
1444 TREE_OPERAND (op2, 1)))
1445 return true;
1446 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1447 && same_bool_comparison_p (op2, TREE_CODE (op1),
1448 TREE_OPERAND (op1, 0),
1449 TREE_OPERAND (op1, 1)))
1450 return true;
1452 /* Default case. */
1453 return false;
1456 /* Forward declarations for some mutually recursive functions. */
1458 static tree
1459 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1460 enum tree_code code2, tree op2a, tree op2b);
1461 static tree
1462 and_var_with_comparison (tree var, bool invert,
1463 enum tree_code code2, tree op2a, tree op2b);
1464 static tree
1465 and_var_with_comparison_1 (gimple stmt,
1466 enum tree_code code2, tree op2a, tree op2b);
1467 static tree
1468 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1469 enum tree_code code2, tree op2a, tree op2b);
1470 static tree
1471 or_var_with_comparison (tree var, bool invert,
1472 enum tree_code code2, tree op2a, tree op2b);
1473 static tree
1474 or_var_with_comparison_1 (gimple stmt,
1475 enum tree_code code2, tree op2a, tree op2b);
1477 /* Helper function for and_comparisons_1: try to simplify the AND of the
1478 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1479 If INVERT is true, invert the value of the VAR before doing the AND.
1480 Return NULL_EXPR if we can't simplify this to a single expression. */
1482 static tree
1483 and_var_with_comparison (tree var, bool invert,
1484 enum tree_code code2, tree op2a, tree op2b)
1486 tree t;
1487 gimple stmt = SSA_NAME_DEF_STMT (var);
1489 /* We can only deal with variables whose definitions are assignments. */
1490 if (!is_gimple_assign (stmt))
1491 return NULL_TREE;
1493 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1494 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1495 Then we only have to consider the simpler non-inverted cases. */
1496 if (invert)
1497 t = or_var_with_comparison_1 (stmt,
1498 invert_tree_comparison (code2, false),
1499 op2a, op2b);
1500 else
1501 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1502 return canonicalize_bool (t, invert);
1505 /* Try to simplify the AND of the ssa variable defined by the assignment
1506 STMT with the comparison specified by (OP2A CODE2 OP2B).
1507 Return NULL_EXPR if we can't simplify this to a single expression. */
1509 static tree
1510 and_var_with_comparison_1 (gimple stmt,
1511 enum tree_code code2, tree op2a, tree op2b)
1513 tree var = gimple_assign_lhs (stmt);
1514 tree true_test_var = NULL_TREE;
1515 tree false_test_var = NULL_TREE;
1516 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1518 /* Check for identities like (var AND (var == 0)) => false. */
1519 if (TREE_CODE (op2a) == SSA_NAME
1520 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) / (op2a == nonzero) tests op2a true...  */
1522 if ((code2 == NE_EXPR && integer_zerop (op2b))
1523 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1525 true_test_var = op2a;
1526 if (var == true_test_var)
1527 return var;
/* ...and (op2a == 0) / (op2a != nonzero) tests op2a false.  */
1529 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1530 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1532 false_test_var = op2a;
1533 if (var == false_test_var)
1534 return boolean_false_node;
1538 /* If the definition is a comparison, recurse on it. */
1539 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1541 tree t = and_comparisons_1 (innercode,
1542 gimple_assign_rhs1 (stmt),
1543 gimple_assign_rhs2 (stmt),
1544 code2,
1545 op2a,
1546 op2b);
1547 if (t)
1548 return t;
1551 /* If the definition is an AND or OR expression, we may be able to
1552 simplify by reassociating. */
1553 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1554 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1556 tree inner1 = gimple_assign_rhs1 (stmt);
1557 tree inner2 = gimple_assign_rhs2 (stmt);
1558 gimple s;
1559 tree t;
1560 tree partial = NULL_TREE;
1561 bool is_and = (innercode == BIT_AND_EXPR);
1563 /* Check for boolean identities that don't require recursive examination
1564 of inner1/inner2:
1565 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1566 inner1 AND (inner1 OR inner2) => inner1
1567 !inner1 AND (inner1 AND inner2) => false
1568 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1569 Likewise for similar cases involving inner2. */
1570 if (inner1 == true_test_var)
1571 return (is_and ? var : inner1);
1572 else if (inner2 == true_test_var)
1573 return (is_and ? var : inner2);
1574 else if (inner1 == false_test_var)
1575 return (is_and
1576 ? boolean_false_node
1577 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1578 else if (inner2 == false_test_var)
1579 return (is_and
1580 ? boolean_false_node
1581 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1583 /* Next, redistribute/reassociate the AND across the inner tests.
1584 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1585 if (TREE_CODE (inner1) == SSA_NAME
1586 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1587 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1588 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1589 gimple_assign_rhs1 (s),
1590 gimple_assign_rhs2 (s),
1591 code2, op2a, op2b)))
1593 /* Handle the AND case, where we are reassociating:
1594 (inner1 AND inner2) AND (op2a code2 op2b)
1595 => (t AND inner2)
1596 If the partial result t is a constant, we win. Otherwise
1597 continue on to try reassociating with the other inner test. */
1598 if (is_and)
1600 if (integer_onep (t))
1601 return inner2;
1602 else if (integer_zerop (t))
1603 return boolean_false_node;
1606 /* Handle the OR case, where we are redistributing:
1607 (inner1 OR inner2) AND (op2a code2 op2b)
1608 => (t OR (inner2 AND (op2a code2 op2b))) */
1609 else if (integer_onep (t))
1610 return boolean_true_node;
1612 /* Save partial result for later. */
1613 partial = t;
1616 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1617 if (TREE_CODE (inner2) == SSA_NAME
1618 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1619 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1620 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1621 gimple_assign_rhs1 (s),
1622 gimple_assign_rhs2 (s),
1623 code2, op2a, op2b)))
1625 /* Handle the AND case, where we are reassociating:
1626 (inner1 AND inner2) AND (op2a code2 op2b)
1627 => (inner1 AND t) */
1628 if (is_and)
1630 if (integer_onep (t))
1631 return inner1;
1632 else if (integer_zerop (t))
1633 return boolean_false_node;
1634 /* If both are the same, we can apply the identity
1635 (x AND x) == x. */
1636 else if (partial && same_bool_result_p (t, partial))
1637 return t;
1640 /* Handle the OR case. where we are redistributing:
1641 (inner1 OR inner2) AND (op2a code2 op2b)
1642 => (t OR (inner1 AND (op2a code2 op2b)))
1643 => (t OR partial) */
1644 else
1646 if (integer_onep (t))
1647 return boolean_true_node;
1648 else if (partial)
1650 /* We already got a simplification for the other
1651 operand to the redistributed OR expression. The
1652 interesting case is when at least one is false.
1653 Or, if both are the same, we can apply the identity
1654 (x OR x) == x. */
1655 if (integer_zerop (partial))
1656 return t;
1657 else if (integer_zerop (t))
1658 return partial;
1659 else if (same_bool_result_p (t, partial))
1660 return t;
/* No simplification found.  */
1665 return NULL_TREE;
1668 /* Try to simplify the AND of two comparisons defined by
1669 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1670 If this can be done without constructing an intermediate value,
1671 return the resulting tree; otherwise NULL_TREE is returned.
1672 This function is deliberately asymmetric as it recurses on SSA_DEFs
1673 in the first comparison but not the second. */
1675 static tree
1676 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1677 enum tree_code code2, tree op2a, tree op2b)
1679 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1680 if (operand_equal_p (op1a, op2a, 0)
1681 && operand_equal_p (op1b, op2b, 0))
1683 /* Result will be either NULL_TREE, or a combined comparison. */
1684 tree t = combine_comparisons (UNKNOWN_LOCATION,
1685 TRUTH_ANDIF_EXPR, code1, code2,
1686 boolean_type_node, op1a, op1b);
1687 if (t)
1688 return t;
1691 /* Likewise the swapped case of the above. */
1692 if (operand_equal_p (op1a, op2b, 0)
1693 && operand_equal_p (op1b, op2a, 0))
1695 /* Result will be either NULL_TREE, or a combined comparison. */
1696 tree t = combine_comparisons (UNKNOWN_LOCATION,
1697 TRUTH_ANDIF_EXPR, code1,
1698 swap_tree_comparison (code2),
1699 boolean_type_node, op1a, op1b);
1700 if (t)
1701 return t;
1704 /* If both comparisons are of the same value against constants, we might
1705 be able to merge them. */
1706 if (operand_equal_p (op1a, op2a, 0)
1707 && TREE_CODE (op1b) == INTEGER_CST
1708 && TREE_CODE (op2b) == INTEGER_CST)
/* CMP orders the two constant bounds: <0, 0, >0 for op1b <, ==, > op2b.  */
1710 int cmp = tree_int_cst_compare (op1b, op2b);
1712 /* If we have (op1a == op1b), we should either be able to
1713 return that or FALSE, depending on whether the constant op1b
1714 also satisfies the other comparison against op2b. */
1715 if (code1 == EQ_EXPR)
1717 bool done = true;
1718 bool val;
1719 switch (code2)
1721 case EQ_EXPR: val = (cmp == 0); break;
1722 case NE_EXPR: val = (cmp != 0); break;
1723 case LT_EXPR: val = (cmp < 0); break;
1724 case GT_EXPR: val = (cmp > 0); break;
1725 case LE_EXPR: val = (cmp <= 0); break;
1726 case GE_EXPR: val = (cmp >= 0); break;
1727 default: done = false;
1729 if (done)
1731 if (val)
1732 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1733 else
1734 return boolean_false_node;
1737 /* Likewise if the second comparison is an == comparison. */
1738 else if (code2 == EQ_EXPR)
1740 bool done = true;
1741 bool val;
1742 switch (code1)
1744 case EQ_EXPR: val = (cmp == 0); break;
1745 case NE_EXPR: val = (cmp != 0); break;
1746 case LT_EXPR: val = (cmp > 0); break;
1747 case GT_EXPR: val = (cmp < 0); break;
1748 case LE_EXPR: val = (cmp >= 0); break;
1749 case GE_EXPR: val = (cmp <= 0); break;
1750 default: done = false;
1752 if (done)
1754 if (val)
1755 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1756 else
1757 return boolean_false_node;
1761 /* Same business with inequality tests. */
1762 else if (code1 == NE_EXPR)
1764 bool val;
1765 switch (code2)
1767 case EQ_EXPR: val = (cmp != 0); break;
1768 case NE_EXPR: val = (cmp == 0); break;
1769 case LT_EXPR: val = (cmp >= 0); break;
1770 case GT_EXPR: val = (cmp <= 0); break;
1771 case LE_EXPR: val = (cmp > 0); break;
1772 case GE_EXPR: val = (cmp < 0); break;
1773 default:
1774 val = false;
/* VAL set means the second test implies the first; keep the second.  */
1776 if (val)
1777 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1779 else if (code2 == NE_EXPR)
1781 bool val;
1782 switch (code1)
1784 case EQ_EXPR: val = (cmp == 0); break;
1785 case NE_EXPR: val = (cmp != 0); break;
1786 case LT_EXPR: val = (cmp <= 0); break;
1787 case GT_EXPR: val = (cmp >= 0); break;
1788 case LE_EXPR: val = (cmp < 0); break;
1789 case GE_EXPR: val = (cmp > 0); break;
1790 default:
1791 val = false;
1793 if (val)
1794 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1797 /* Chose the more restrictive of two < or <= comparisons. */
1798 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1799 && (code2 == LT_EXPR || code2 == LE_EXPR))
1801 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1802 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1803 else
1804 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1807 /* Likewise chose the more restrictive of two > or >= comparisons. */
1808 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1809 && (code2 == GT_EXPR || code2 == GE_EXPR))
1811 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1812 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1813 else
1814 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1817 /* Check for singleton ranges. */
1818 else if (cmp == 0
1819 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1820 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1821 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1823 /* Check for disjoint ranges. */
1824 else if (cmp <= 0
1825 && (code1 == LT_EXPR || code1 == LE_EXPR)
1826 && (code2 == GT_EXPR || code2 == GE_EXPR))
1827 return boolean_false_node;
1828 else if (cmp >= 0
1829 && (code1 == GT_EXPR || code1 == GE_EXPR)
1830 && (code2 == LT_EXPR || code2 == LE_EXPR))
1831 return boolean_false_node;
1834 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1835 NAME's definition is a truth value. See if there are any simplifications
1836 that can be done against the NAME's definition. */
1837 if (TREE_CODE (op1a) == SSA_NAME
1838 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1839 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is set when the first comparison tests NAME false.  */
1841 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1842 || (code1 == NE_EXPR && integer_onep (op1b)));
1843 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1844 switch (gimple_code (stmt))
1846 case GIMPLE_ASSIGN:
1847 /* Try to simplify by copy-propagating the definition. */
1848 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1850 case GIMPLE_PHI:
1851 /* If every argument to the PHI produces the same result when
1852 ANDed with the second comparison, we win.
1853 Do not do this unless the type is bool since we need a bool
1854 result here anyway. */
1855 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1857 tree result = NULL_TREE;
1858 unsigned i;
1859 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1861 tree arg = gimple_phi_arg_def (stmt, i);
1863 /* If this PHI has itself as an argument, ignore it.
1864 If all the other args produce the same result,
1865 we're still OK. */
1866 if (arg == gimple_phi_result (stmt))
1867 continue;
1868 else if (TREE_CODE (arg) == INTEGER_CST)
/* A constant arg that tests false makes the AND false for that
   edge; one that tests true leaves the second comparison.  */
1870 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1872 if (!result)
1873 result = boolean_false_node;
1874 else if (!integer_zerop (result))
1875 return NULL_TREE;
1877 else if (!result)
1878 result = fold_build2 (code2, boolean_type_node,
1879 op2a, op2b);
1880 else if (!same_bool_comparison_p (result,
1881 code2, op2a, op2b))
1882 return NULL_TREE;
1884 else if (TREE_CODE (arg) == SSA_NAME
1885 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1887 tree temp;
1888 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1889 /* In simple cases we can look through PHI nodes,
1890 but we have to be careful with loops.
1891 See PR49073. */
1892 if (! dom_info_available_p (CDI_DOMINATORS)
1893 || gimple_bb (def_stmt) == gimple_bb (stmt)
1894 || dominated_by_p (CDI_DOMINATORS,
1895 gimple_bb (def_stmt),
1896 gimple_bb (stmt)))
1897 return NULL_TREE;
1898 temp = and_var_with_comparison (arg, invert, code2,
1899 op2a, op2b);
1900 if (!temp)
1901 return NULL_TREE;
1902 else if (!result)
1903 result = temp;
1904 else if (!same_bool_result_p (result, temp))
1905 return NULL_TREE;
1907 else
1908 return NULL_TREE;
1910 return result;
1913 default:
1914 break;
1917 return NULL_TREE;
1920 /* Try to simplify the AND of two comparisons, specified by
1921 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1922 If this can be simplified to a single expression (without requiring
1923 introducing more SSA variables to hold intermediate values),
1924 return the resulting tree. Otherwise return NULL_TREE.
1925 If the result expression is non-null, it has boolean type. */
1927 tree
1928 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1929 enum tree_code code2, tree op2a, tree op2b)
1931 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1932 if (t)
1933 return t;
1934 else
1935 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1938 /* Helper function for or_comparisons_1: try to simplify the OR of the
1939 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1940 If INVERT is true, invert the value of VAR before doing the OR.
1941 Return NULL_EXPR if we can't simplify this to a single expression. */
1943 static tree
1944 or_var_with_comparison (tree var, bool invert,
1945 enum tree_code code2, tree op2a, tree op2b)
1947 tree t;
1948 gimple stmt = SSA_NAME_DEF_STMT (var);
1950 /* We can only deal with variables whose definitions are assignments. */
1951 if (!is_gimple_assign (stmt))
1952 return NULL_TREE;
1954 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1955 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1956 Then we only have to consider the simpler non-inverted cases. */
1957 if (invert)
1958 t = and_var_with_comparison_1 (stmt,
1959 invert_tree_comparison (code2, false),
1960 op2a, op2b);
1961 else
1962 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1963 return canonicalize_bool (t, invert);
1966 /* Try to simplify the OR of the ssa variable defined by the assignment
1967 STMT with the comparison specified by (OP2A CODE2 OP2B).
1968 Return NULL_EXPR if we can't simplify this to a single expression. */
1970 static tree
1971 or_var_with_comparison_1 (gimple stmt,
1972 enum tree_code code2, tree op2a, tree op2b)
1974 tree var = gimple_assign_lhs (stmt);
1975 tree true_test_var = NULL_TREE;
1976 tree false_test_var = NULL_TREE;
1977 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1979 /* Check for identities like (var OR (var != 0)) => true . */
1980 if (TREE_CODE (op2a) == SSA_NAME
1981 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) / (op2a == nonzero) tests op2a true...  */
1983 if ((code2 == NE_EXPR && integer_zerop (op2b))
1984 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1986 true_test_var = op2a;
1987 if (var == true_test_var)
1988 return var;
/* ...and (op2a == 0) / (op2a != nonzero) tests op2a false.  */
1990 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1991 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1993 false_test_var = op2a;
1994 if (var == false_test_var)
1995 return boolean_true_node;
1999 /* If the definition is a comparison, recurse on it. */
2000 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2002 tree t = or_comparisons_1 (innercode,
2003 gimple_assign_rhs1 (stmt),
2004 gimple_assign_rhs2 (stmt),
2005 code2,
2006 op2a,
2007 op2b);
2008 if (t)
2009 return t;
2012 /* If the definition is an AND or OR expression, we may be able to
2013 simplify by reassociating. */
2014 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2015 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2017 tree inner1 = gimple_assign_rhs1 (stmt);
2018 tree inner2 = gimple_assign_rhs2 (stmt);
2019 gimple s;
2020 tree t;
2021 tree partial = NULL_TREE;
2022 bool is_or = (innercode == BIT_IOR_EXPR);
2024 /* Check for boolean identities that don't require recursive examination
2025 of inner1/inner2:
2026 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2027 inner1 OR (inner1 AND inner2) => inner1
2028 !inner1 OR (inner1 OR inner2) => true
2029 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2031 if (inner1 == true_test_var)
2032 return (is_or ? var : inner1);
2033 else if (inner2 == true_test_var)
2034 return (is_or ? var : inner2);
2035 else if (inner1 == false_test_var)
2036 return (is_or
2037 ? boolean_true_node
2038 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2039 else if (inner2 == false_test_var)
2040 return (is_or
2041 ? boolean_true_node
2042 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2044 /* Next, redistribute/reassociate the OR across the inner tests.
2045 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2046 if (TREE_CODE (inner1) == SSA_NAME
2047 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2048 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2049 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2050 gimple_assign_rhs1 (s),
2051 gimple_assign_rhs2 (s),
2052 code2, op2a, op2b)))
2054 /* Handle the OR case, where we are reassociating:
2055 (inner1 OR inner2) OR (op2a code2 op2b)
2056 => (t OR inner2)
2057 If the partial result t is a constant, we win. Otherwise
2058 continue on to try reassociating with the other inner test. */
2059 if (is_or)
2061 if (integer_onep (t))
2062 return boolean_true_node;
2063 else if (integer_zerop (t))
2064 return inner2;
2067 /* Handle the AND case, where we are redistributing:
2068 (inner1 AND inner2) OR (op2a code2 op2b)
2069 => (t AND (inner2 OR (op2a code op2b))) */
2070 else if (integer_zerop (t))
2071 return boolean_false_node;
2073 /* Save partial result for later. */
2074 partial = t;
2077 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2078 if (TREE_CODE (inner2) == SSA_NAME
2079 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2080 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2081 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2082 gimple_assign_rhs1 (s),
2083 gimple_assign_rhs2 (s),
2084 code2, op2a, op2b)))
2086 /* Handle the OR case, where we are reassociating:
2087 (inner1 OR inner2) OR (op2a code2 op2b)
2088 => (inner1 OR t)
2089 => (t OR partial) */
2090 if (is_or)
2092 if (integer_zerop (t))
2093 return inner1;
2094 else if (integer_onep (t))
2095 return boolean_true_node;
2096 /* If both are the same, we can apply the identity
2097 (x OR x) == x. */
2098 else if (partial && same_bool_result_p (t, partial))
2099 return t;
2102 /* Handle the AND case, where we are redistributing:
2103 (inner1 AND inner2) OR (op2a code2 op2b)
2104 => (t AND (inner1 OR (op2a code2 op2b)))
2105 => (t AND partial) */
2106 else
2108 if (integer_zerop (t))
2109 return boolean_false_node;
2110 else if (partial)
2112 /* We already got a simplification for the other
2113 operand to the redistributed AND expression. The
2114 interesting case is when at least one is true.
2115 Or, if both are the same, we can apply the identity
2116 (x AND x) == x. */
2117 if (integer_onep (partial))
2118 return t;
2119 else if (integer_onep (t))
2120 return partial;
2121 else if (same_bool_result_p (t, partial))
2122 return t;
/* No simplification found.  */
2127 return NULL_TREE;
2130 /* Try to simplify the OR of two comparisons defined by
2131 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2132 If this can be done without constructing an intermediate value,
2133 return the resulting tree; otherwise NULL_TREE is returned.
2134 This function is deliberately asymmetric as it recurses on SSA_DEFs
2135 in the first comparison but not the second. */
2137 static tree
2138 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2139 enum tree_code code2, tree op2a, tree op2b)
/* NOTE(review): deliberately asymmetric -- only the first comparison's
   SSA_NAME definition is looked through; see the recursion at the end.  */
2141 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2142 if (operand_equal_p (op1a, op2a, 0)
2143 && operand_equal_p (op1b, op2b, 0))
2145 /* Result will be either NULL_TREE, or a combined comparison. */
2146 tree t = combine_comparisons (UNKNOWN_LOCATION,
2147 TRUTH_ORIF_EXPR, code1, code2,
2148 boolean_type_node, op1a, op1b);
2149 if (t)
2150 return t;
2153 /* Likewise the swapped case of the above. */
2154 if (operand_equal_p (op1a, op2b, 0)
2155 && operand_equal_p (op1b, op2a, 0))
2157 /* Result will be either NULL_TREE, or a combined comparison. */
2158 tree t = combine_comparisons (UNKNOWN_LOCATION,
2159 TRUTH_ORIF_EXPR, code1,
2160 swap_tree_comparison (code2),
2161 boolean_type_node, op1a, op1b);
2162 if (t)
2163 return t;
2166 /* If both comparisons are of the same value against constants, we might
2167 be able to merge them. */
2168 if (operand_equal_p (op1a, op2a, 0)
2169 && TREE_CODE (op1b) == INTEGER_CST
2170 && TREE_CODE (op2b) == INTEGER_CST)
/* cmp is the ordering of the two constant bounds: negative, zero or
   positive as op1b is less than, equal to, or greater than op2b.  */
2172 int cmp = tree_int_cst_compare (op1b, op2b);
2174 /* If we have (op1a != op1b), we should either be able to
2175 return that or TRUE, depending on whether the constant op1b
2176 also satisfies the other comparison against op2b. */
2177 if (code1 == NE_EXPR)
2179 bool done = true;
2180 bool val;
2181 switch (code2)
2183 case EQ_EXPR: val = (cmp == 0); break;
2184 case NE_EXPR: val = (cmp != 0); break;
2185 case LT_EXPR: val = (cmp < 0); break;
2186 case GT_EXPR: val = (cmp > 0); break;
2187 case LE_EXPR: val = (cmp <= 0); break;
2188 case GE_EXPR: val = (cmp >= 0); break;
2189 default: done = false;
2191 if (done)
2193 if (val)
2194 return boolean_true_node;
2195 else
2196 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2199 /* Likewise if the second comparison is a != comparison. */
2200 else if (code2 == NE_EXPR)
2202 bool done = true;
2203 bool val;
2204 switch (code1)
2206 case EQ_EXPR: val = (cmp == 0); break;
2207 case NE_EXPR: val = (cmp != 0); break;
2208 case LT_EXPR: val = (cmp > 0); break;
2209 case GT_EXPR: val = (cmp < 0); break;
2210 case LE_EXPR: val = (cmp >= 0); break;
2211 case GE_EXPR: val = (cmp <= 0); break;
2212 default: done = false;
2214 if (done)
2216 if (val)
2217 return boolean_true_node;
2218 else
2219 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2223 /* See if an equality test is redundant with the other comparison. */
2224 else if (code1 == EQ_EXPR)
2226 bool val;
2227 switch (code2)
2229 case EQ_EXPR: val = (cmp == 0); break;
2230 case NE_EXPR: val = (cmp != 0); break;
2231 case LT_EXPR: val = (cmp < 0); break;
2232 case GT_EXPR: val = (cmp > 0); break;
2233 case LE_EXPR: val = (cmp <= 0); break;
2234 case GE_EXPR: val = (cmp >= 0); break;
2235 default:
2236 val = false;
2238 if (val)
2239 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2241 else if (code2 == EQ_EXPR)
2243 bool val;
2244 switch (code1)
2246 case EQ_EXPR: val = (cmp == 0); break;
2247 case NE_EXPR: val = (cmp != 0); break;
2248 case LT_EXPR: val = (cmp > 0); break;
2249 case GT_EXPR: val = (cmp < 0); break;
2250 case LE_EXPR: val = (cmp >= 0); break;
2251 case GE_EXPR: val = (cmp <= 0); break;
2252 default:
2253 val = false;
2255 if (val)
2256 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2259 /* Chose the less restrictive of two < or <= comparisons. */
2260 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2261 && (code2 == LT_EXPR || code2 == LE_EXPR))
2263 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2264 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2265 else
2266 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2269 /* Likewise chose the less restrictive of two > or >= comparisons. */
2270 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2271 && (code2 == GT_EXPR || code2 == GE_EXPR))
2273 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2274 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2275 else
2276 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2279 /* Check for singleton ranges. */
2280 else if (cmp == 0
2281 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2282 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2283 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2285 /* Check for less/greater pairs that don't restrict the range at all. */
2286 else if (cmp >= 0
2287 && (code1 == LT_EXPR || code1 == LE_EXPR)
2288 && (code2 == GT_EXPR || code2 == GE_EXPR))
2289 return boolean_true_node;
2290 else if (cmp <= 0
2291 && (code1 == GT_EXPR || code1 == GE_EXPR)
2292 && (code2 == LT_EXPR || code2 == LE_EXPR))
2293 return boolean_true_node;
2296 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2297 NAME's definition is a truth value. See if there are any simplifications
2298 that can be done against the NAME's definition. */
2299 if (TREE_CODE (op1a) == SSA_NAME
2300 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2301 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is true when the first comparison tests !NAME rather than NAME
   (i.e. NAME == 0 or NAME != 1).  */
2303 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2304 || (code1 == NE_EXPR && integer_onep (op1b)));
2305 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2306 switch (gimple_code (stmt))
2308 case GIMPLE_ASSIGN:
2309 /* Try to simplify by copy-propagating the definition. */
2310 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2312 case GIMPLE_PHI:
2313 /* If every argument to the PHI produces the same result when
2314 ORed with the second comparison, we win.
2315 Do not do this unless the type is bool since we need a bool
2316 result here anyway. */
2317 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2319 tree result = NULL_TREE;
2320 unsigned i;
2321 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2323 tree arg = gimple_phi_arg_def (stmt, i);
2325 /* If this PHI has itself as an argument, ignore it.
2326 If all the other args produce the same result,
2327 we're still OK. */
2328 if (arg == gimple_phi_result (stmt))
2329 continue;
2330 else if (TREE_CODE (arg) == INTEGER_CST)
2332 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2334 if (!result)
2335 result = boolean_true_node;
2336 else if (!integer_onep (result))
2337 return NULL_TREE;
2339 else if (!result)
2340 result = fold_build2 (code2, boolean_type_node,
2341 op2a, op2b);
2342 else if (!same_bool_comparison_p (result,
2343 code2, op2a, op2b))
2344 return NULL_TREE;
2346 else if (TREE_CODE (arg) == SSA_NAME
2347 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2349 tree temp;
2350 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2351 /* In simple cases we can look through PHI nodes,
2352 but we have to be careful with loops.
2353 See PR49073. */
2354 if (! dom_info_available_p (CDI_DOMINATORS)
2355 || gimple_bb (def_stmt) == gimple_bb (stmt)
2356 || dominated_by_p (CDI_DOMINATORS,
2357 gimple_bb (def_stmt),
2358 gimple_bb (stmt)))
2359 return NULL_TREE;
2360 temp = or_var_with_comparison (arg, invert, code2,
2361 op2a, op2b);
2362 if (!temp)
2363 return NULL_TREE;
2364 else if (!result)
2365 result = temp;
2366 else if (!same_bool_result_p (result, temp))
2367 return NULL_TREE;
2369 else
2370 return NULL_TREE;
2372 return result;
2375 default:
2376 break;
2379 return NULL_TREE;
2382 /* Try to simplify the OR of two comparisons, specified by
2383 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2384 If this can be simplified to a single expression (without requiring
2385 introducing more SSA variables to hold intermediate values),
2386 return the resulting tree. Otherwise return NULL_TREE.
2387 If the result expression is non-null, it has boolean type. */
2389 tree
2390 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2391 enum tree_code code2, tree op2a, tree op2b)
2393 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2394 if (t)
2395 return t;
2396 else
2397 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2401 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2403 Either NULL_TREE, a simplified but non-constant or a constant
2404 is returned.
2406 ??? This should go into a gimple-fold-inline.h file to be eventually
2407 privatized with the single valueize function used in the various TUs
2408 to avoid the indirect function call overhead. */
2410 tree
2411 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2413 location_t loc = gimple_location (stmt);
2414 switch (gimple_code (stmt))
2416 case GIMPLE_ASSIGN:
2418 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2420 switch (get_gimple_rhs_class (subcode))
2422 case GIMPLE_SINGLE_RHS:
2424 tree rhs = gimple_assign_rhs1 (stmt);
2425 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2427 if (TREE_CODE (rhs) == SSA_NAME)
2429 /* If the RHS is an SSA_NAME, return its known constant value,
2430 if any. */
2431 return (*valueize) (rhs);
2433 /* Handle propagating invariant addresses into address
2434 operations. */
2435 else if (TREE_CODE (rhs) == ADDR_EXPR
2436 && !is_gimple_min_invariant (rhs))
2438 HOST_WIDE_INT offset;
2439 tree base;
2440 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2441 &offset,
2442 valueize);
2443 if (base
2444 && (CONSTANT_CLASS_P (base)
2445 || decl_address_invariant_p (base)))
2446 return build_invariant_address (TREE_TYPE (rhs),
2447 base, offset);
/* A vector CONSTRUCTOR folds to a VECTOR_CST only if every valueized
   element is a literal constant and all elements are present.  */
2449 else if (TREE_CODE (rhs) == CONSTRUCTOR
2450 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2451 && (CONSTRUCTOR_NELTS (rhs)
2452 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2454 unsigned i;
2455 tree val, list;
2457 list = NULL_TREE;
2458 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2460 val = (*valueize) (val);
2461 if (TREE_CODE (val) == INTEGER_CST
2462 || TREE_CODE (val) == REAL_CST
2463 || TREE_CODE (val) == FIXED_CST)
2464 list = tree_cons (NULL_TREE, val, list);
2465 else
2466 return NULL_TREE;
2469 return build_vector (TREE_TYPE (rhs), nreverse (list));
2472 if (kind == tcc_reference)
2474 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2475 || TREE_CODE (rhs) == REALPART_EXPR
2476 || TREE_CODE (rhs) == IMAGPART_EXPR)
2477 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2479 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2480 return fold_unary_loc (EXPR_LOCATION (rhs),
2481 TREE_CODE (rhs),
2482 TREE_TYPE (rhs), val);
2484 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2485 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2487 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2488 return fold_ternary_loc (EXPR_LOCATION (rhs),
2489 TREE_CODE (rhs),
2490 TREE_TYPE (rhs), val,
2491 TREE_OPERAND (rhs, 1),
2492 TREE_OPERAND (rhs, 2));
2494 else if (TREE_CODE (rhs) == MEM_REF
2495 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2497 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2498 if (TREE_CODE (val) == ADDR_EXPR
2499 && is_gimple_min_invariant (val))
2501 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2502 unshare_expr (val),
2503 TREE_OPERAND (rhs, 1));
2504 if (tem)
2505 rhs = tem;
2508 return fold_const_aggregate_ref_1 (rhs, valueize);
2510 else if (kind == tcc_declaration)
2511 return get_symbol_constant_value (rhs);
2512 return rhs;
2515 case GIMPLE_UNARY_RHS:
2517 /* Handle unary operators that can appear in GIMPLE form.
2518 Note that we know the single operand must be a constant,
2519 so this should almost always return a simplified RHS. */
2520 tree lhs = gimple_assign_lhs (stmt);
2521 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2523 /* Conversions are useless for CCP purposes if they are
2524 value-preserving. Thus the restrictions that
2525 useless_type_conversion_p places for restrict qualification
2526 of pointer types should not apply here.
2527 Substitution later will only substitute to allowed places. */
2528 if (CONVERT_EXPR_CODE_P (subcode)
2529 && POINTER_TYPE_P (TREE_TYPE (lhs))
2530 && POINTER_TYPE_P (TREE_TYPE (op0))
2531 && (TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2532 == TYPE_ADDR_SPACE (TREE_TYPE (op0))))
2533 return op0;
2535 return
2536 fold_unary_ignore_overflow_loc (loc, subcode,
2537 gimple_expr_type (stmt), op0);
2540 case GIMPLE_BINARY_RHS:
2542 /* Handle binary operators that can appear in GIMPLE form. */
2543 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2544 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2546 /* Translate &x + CST into an invariant form suitable for
2547 further propagation. */
2548 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2549 && TREE_CODE (op0) == ADDR_EXPR
2550 && TREE_CODE (op1) == INTEGER_CST)
2552 tree off = fold_convert (ptr_type_node, op1);
2553 return build_fold_addr_expr_loc
2554 (loc,
2555 fold_build2 (MEM_REF,
2556 TREE_TYPE (TREE_TYPE (op0)),
2557 unshare_expr (op0), off));
2560 return fold_binary_loc (loc, subcode,
2561 gimple_expr_type (stmt), op0, op1);
2564 case GIMPLE_TERNARY_RHS:
2566 /* Handle ternary operators that can appear in GIMPLE form. */
2567 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2568 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2569 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2571 /* Fold embedded expressions in ternary codes. */
2572 if ((subcode == COND_EXPR
2573 || subcode == VEC_COND_EXPR)
2574 && COMPARISON_CLASS_P (op0))
2576 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2577 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2578 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2579 TREE_TYPE (op0), op00, op01);
2580 if (tem)
2581 op0 = tem;
2584 return fold_ternary_loc (loc, subcode,
2585 gimple_expr_type (stmt), op0, op1, op2);
2588 default:
2589 gcc_unreachable ();
/* Calls fold only when the valueized callee is the address of a
   built-in function declaration.  */
2593 case GIMPLE_CALL:
2595 tree fn;
2597 if (gimple_call_internal_p (stmt))
2598 /* No folding yet for these functions. */
2599 return NULL_TREE;
2601 fn = (*valueize) (gimple_call_fn (stmt));
2602 if (TREE_CODE (fn) == ADDR_EXPR
2603 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2604 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2606 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2607 tree call, retval;
2608 unsigned i;
2609 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2610 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2611 call = build_call_array_loc (loc,
2612 gimple_call_return_type (stmt),
2613 fn, gimple_call_num_args (stmt), args);
2614 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2615 if (retval)
2616 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2617 STRIP_NOPS (retval);
2618 return retval;
2620 return NULL_TREE;
2623 default:
2624 return NULL_TREE;
2628 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2629 Returns NULL_TREE if folding to a constant is not possible, otherwise
2630 returns a constant according to is_gimple_min_invariant. */
2632 tree
2633 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2635 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2636 if (res && is_gimple_min_invariant (res))
2637 return res;
2638 return NULL_TREE;
2642 /* The following set of functions are supposed to fold references using
2643 their constant initializers. */
2645 static tree fold_ctor_reference (tree type, tree ctor,
2646 unsigned HOST_WIDE_INT offset,
2647 unsigned HOST_WIDE_INT size);
2649 /* See if we can find constructor defining value of BASE.
2650 When we know the consructor with constant offset (such as
2651 base is array[40] and we do know constructor of array), then
2652 BIT_OFFSET is adjusted accordingly.
2654 As a special case, return error_mark_node when constructor
2655 is not explicitly available, but it is known to be zero
2656 such as 'static const int a;'. */
2657 static tree
2658 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2659 tree (*valueize)(tree))
2661 HOST_WIDE_INT bit_offset2, size, max_size;
2662 if (TREE_CODE (base) == MEM_REF)
/* Fold a constant MEM_REF offset into *BIT_OFFSET; a variable
   offset defeats the walk.  */
2664 if (!integer_zerop (TREE_OPERAND (base, 1)))
2666 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2667 return NULL_TREE;
2668 *bit_offset += (mem_ref_offset (base).low
2669 * BITS_PER_UNIT);
2672 if (valueize
2673 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2674 base = valueize (TREE_OPERAND (base, 0));
2675 if (!base || TREE_CODE (base) != ADDR_EXPR)
2676 return NULL_TREE;
2677 base = TREE_OPERAND (base, 0);
2680 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2681 DECL_INITIAL. If BASE is a nested reference into another
2682 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2683 the inner reference. */
2684 switch (TREE_CODE (base))
2686 case VAR_DECL:
2687 if (!const_value_known_p (base))
2688 return NULL_TREE;
2690 /* Fallthru. */
2691 case CONST_DECL:
/* No initializer for a static/external decl means it is known to be
   zero-initialized; signal that with error_mark_node.  */
2692 if (!DECL_INITIAL (base)
2693 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2694 return error_mark_node;
2695 return DECL_INITIAL (base);
2697 case ARRAY_REF:
2698 case COMPONENT_REF:
2699 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2700 if (max_size == -1 || size != max_size)
2701 return NULL_TREE;
2702 *bit_offset += bit_offset2;
2703 return get_base_constructor (base, bit_offset, valueize);
2705 case STRING_CST:
2706 case CONSTRUCTOR:
2707 return base;
2709 default:
2710 return NULL_TREE;
2714 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2715 to the memory at bit OFFSET.
2717 We do only simple job of folding byte accesses. */
2719 static tree
2720 fold_string_cst_ctor_reference (tree type, tree ctor,
2721 unsigned HOST_WIDE_INT offset,
2722 unsigned HOST_WIDE_INT size)
2724 if (INTEGRAL_TYPE_P (type)
2725 && (TYPE_MODE (type)
2726 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2727 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2728 == MODE_INT)
2729 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2730 && size == BITS_PER_UNIT
2731 && !(offset % BITS_PER_UNIT))
2733 offset /= BITS_PER_UNIT;
2734 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2735 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2736 [offset]));
2737 /* Folding
2738 const char a[20]="hello";
2739 return a[10];
2741 might lead to offset greater than string length. In this case we
2742 know value is either initialized to 0 or out of bounds. Return 0
2743 in both cases. */
2744 return build_zero_cst (type);
2746 return NULL_TREE;
2749 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2750 SIZE to the memory at bit OFFSET. */
2752 static tree
2753 fold_array_ctor_reference (tree type, tree ctor,
2754 unsigned HOST_WIDE_INT offset,
2755 unsigned HOST_WIDE_INT size)
2757 unsigned HOST_WIDE_INT cnt;
2758 tree cfield, cval;
2759 double_int low_bound, elt_size;
2760 double_int index, max_index;
2761 double_int access_index;
2762 tree domain_type = NULL_TREE;
2763 HOST_WIDE_INT inner_offset;
2765 /* Compute low bound and elt size. */
2766 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2767 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2768 if (domain_type && TYPE_MIN_VALUE (domain_type))
2770 /* Static constructors for variably sized objects makes no sense. */
2771 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST)
2772 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2774 else
2775 low_bound = double_int_zero;
2776 /* Static constructors for variably sized objects makes no sense. */
2777 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2778 == INTEGER_CST);
2779 elt_size =
2780 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2783 /* We can handle only constantly sized accesses that are known to not
2784 be larger than size of array element. */
2785 if (!TYPE_SIZE_UNIT (type)
2786 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2787 || double_int_cmp (elt_size,
2788 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2789 return NULL_TREE;
2791 /* Compute the array index we look for. */
2792 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2793 elt_size, TRUNC_DIV_EXPR);
2794 access_index = double_int_add (access_index, low_bound);
2796 /* And offset within the access. */
2797 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2799 /* See if the array field is large enough to span whole access. We do not
2800 care to fold accesses spanning multiple array indexes. */
2801 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
2802 return NULL_TREE;
/* Walk the elements; INDEX starts one below LOW_BOUND so that an
   element with no explicit index lands on the next position.  */
2804 index = double_int_sub (low_bound, double_int_one);
2805 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2807 /* Array constructor might explicitely set index, or specify range
2808 or leave index NULL meaning that it is next index after previous
2809 one. */
2810 if (cfield)
2812 if (TREE_CODE (cfield) == INTEGER_CST)
2813 max_index = index = tree_to_double_int (cfield);
2814 else
2816 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2817 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2818 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2821 else
2822 max_index = index = double_int_add (index, double_int_one);
2824 /* Do we have match? */
2825 if (double_int_cmp (access_index, index, 1) >= 0
2826 && double_int_cmp (access_index, max_index, 1) <= 0)
2827 return fold_ctor_reference (type, cval, inner_offset, size);
2829 /* When memory is not explicitely mentioned in constructor,
2830 it is 0 (or out of range). */
2831 return build_zero_cst (type);
2834 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2835 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2837 static tree
2838 fold_nonarray_ctor_reference (tree type, tree ctor,
2839 unsigned HOST_WIDE_INT offset,
2840 unsigned HOST_WIDE_INT size)
2842 unsigned HOST_WIDE_INT cnt;
2843 tree cfield, cval;
/* Scan the fields for one whose bit range covers [OFFSET, OFFSET+SIZE).  */
2845 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2846 cval)
2848 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2849 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2850 tree field_size = DECL_SIZE (cfield);
2851 double_int bitoffset;
2852 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2853 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
2854 double_int bitoffset_end, access_end;
2856 /* Variable sized objects in static constructors makes no sense,
2857 but field_size can be NULL for flexible array members. */
2858 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2859 && TREE_CODE (byte_offset) == INTEGER_CST
2860 && (field_size != NULL_TREE
2861 ? TREE_CODE (field_size) == INTEGER_CST
2862 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2864 /* Compute bit offset of the field. */
2865 bitoffset = double_int_add (tree_to_double_int (field_offset),
2866 double_int_mul (byte_offset_cst,
2867 bits_per_unit_cst));
2868 /* Compute bit offset where the field ends. */
2869 if (field_size != NULL_TREE)
2870 bitoffset_end = double_int_add (bitoffset,
2871 tree_to_double_int (field_size));
2872 else
2873 bitoffset_end = double_int_zero;
2875 access_end = double_int_add (uhwi_to_double_int (offset),
2876 uhwi_to_double_int (size));
2878 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2879 [BITOFFSET, BITOFFSET_END)? */
2880 if (double_int_cmp (access_end, bitoffset, 0) > 0
2881 && (field_size == NULL_TREE
2882 || double_int_cmp (uhwi_to_double_int (offset),
2883 bitoffset_end, 0) < 0))
2885 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2886 bitoffset);
2887 /* We do have overlap. Now see if field is large enough to
2888 cover the access. Give up for accesses spanning multiple
2889 fields. */
2890 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2891 return NULL_TREE;
2892 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
2893 return NULL_TREE;
2894 return fold_ctor_reference (type, cval,
2895 double_int_to_uhwi (inner_offset), size);
2898 /* When memory is not explicitely mentioned in constructor, it is 0. */
2899 return build_zero_cst (type);
2902 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2903 to the memory at bit OFFSET. */
2905 static tree
2906 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2907 unsigned HOST_WIDE_INT size)
2909 tree ret;
2911 /* We found the field with exact match. */
2912 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2913 && !offset)
2914 return canonicalize_constructor_val (ctor);
2916 /* We are at the end of walk, see if we can view convert the
2917 result. */
2918 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2919 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2920 && operand_equal_p (TYPE_SIZE (type),
2921 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2923 ret = canonicalize_constructor_val (ctor);
2924 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2925 if (ret)
2926 STRIP_NOPS (ret);
2927 return ret;
2929 if (TREE_CODE (ctor) == STRING_CST)
2930 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2931 if (TREE_CODE (ctor) == CONSTRUCTOR)
2934 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2935 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2936 return fold_array_ctor_reference (type, ctor, offset, size);
2937 else
2938 return fold_nonarray_ctor_reference (type, ctor, offset, size);
2941 return NULL_TREE;
2944 /* Return the tree representing the element referenced by T if T is an
2945 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
2946 names using VALUEIZE. Return NULL_TREE otherwise. */
2948 tree
2949 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2951 tree ctor, idx, base;
2952 HOST_WIDE_INT offset, size, max_size;
2953 tree tem;
/* Never fold a volatile reference.  */
2955 if (TREE_THIS_VOLATILE (t))
2956 return NULL_TREE;
2958 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2959 return get_symbol_constant_value (t);
2961 tem = fold_read_from_constant_string (t);
2962 if (tem)
2963 return tem;
2965 switch (TREE_CODE (t))
2967 case ARRAY_REF:
2968 case ARRAY_RANGE_REF:
2969 /* Constant indexes are handled well by get_base_constructor.
2970 Only special case variable offsets.
2971 FIXME: This code can't handle nested references with variable indexes
2972 (they will be handled only by iteration of ccp). Perhaps we can bring
2973 get_ref_base_and_extent here and make it use a valueize callback. */
2974 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
2975 && valueize
2976 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
2977 && host_integerp (idx, 0))
2979 tree low_bound, unit_size;
2981 /* If the resulting bit-offset is constant, track it. */
2982 if ((low_bound = array_ref_low_bound (t),
2983 host_integerp (low_bound, 0))
2984 && (unit_size = array_ref_element_size (t),
2985 host_integerp (unit_size, 1)))
2987 offset = TREE_INT_CST_LOW (idx);
2988 offset -= TREE_INT_CST_LOW (low_bound);
2989 offset *= TREE_INT_CST_LOW (unit_size);
2990 offset *= BITS_PER_UNIT;
2992 base = TREE_OPERAND (t, 0);
2993 ctor = get_base_constructor (base, &offset, valueize);
2994 /* Empty constructor. Always fold to 0. */
2995 if (ctor == error_mark_node)
2996 return build_zero_cst (TREE_TYPE (t));
2997 /* Out of bound array access. Value is undefined,
2998 but don't fold. */
2999 if (offset < 0)
3000 return NULL_TREE;
3001 /* We can not determine ctor. */
3002 if (!ctor)
3003 return NULL_TREE;
3004 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3005 TREE_INT_CST_LOW (unit_size)
3006 * BITS_PER_UNIT);
3009 /* Fallthru. */
3011 case COMPONENT_REF:
3012 case BIT_FIELD_REF:
3013 case TARGET_MEM_REF:
3014 case MEM_REF:
3015 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3016 ctor = get_base_constructor (base, &offset, valueize);
3018 /* Empty constructor. Always fold to 0. */
3019 if (ctor == error_mark_node)
3020 return build_zero_cst (TREE_TYPE (t));
3021 /* We do not know precise address. */
3022 if (max_size == -1 || max_size != size)
3023 return NULL_TREE;
3024 /* We can not determine ctor. */
3025 if (!ctor)
3026 return NULL_TREE;
3028 /* Out of bound array access. Value is undefined, but don't fold. */
3029 if (offset < 0)
3030 return NULL_TREE;
3032 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3034 case REALPART_EXPR:
3035 case IMAGPART_EXPR:
3037 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3038 if (c && TREE_CODE (c) == COMPLEX_CST)
3039 return fold_build1_loc (EXPR_LOCATION (t),
3040 TREE_CODE (t), TREE_TYPE (t), c);
3041 break;
3044 default:
3045 break;
3048 return NULL_TREE;
3051 tree
3052 fold_const_aggregate_ref (tree t)
3054 return fold_const_aggregate_ref_1 (t, NULL);
3057 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3058 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3059 KNOWN_BINFO carries the binfo describing the true type of
3060 OBJ_TYPE_REF_OBJECT(REF). */
3062 tree
3063 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3065 unsigned HOST_WIDE_INT offset, size;
3066 tree v, fn;
3068 v = BINFO_VTABLE (known_binfo);
3069 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3070 if (!v)
3071 return NULL_TREE;
/* BINFO_VTABLE may be &vtable or &vtable + CST; fold the constant
   displacement into OFFSET (in bits).  */
3073 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3075 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3076 v = TREE_OPERAND (v, 0);
3078 else
3079 offset = 0;
3081 if (TREE_CODE (v) != ADDR_EXPR
3082 return NULL_TREE;
3083 v = TREE_OPERAND (v, 0);
3085 if (TREE_CODE (v) != VAR_DECL
3086 || !DECL_VIRTUAL_P (v)
3087 || !DECL_INITIAL (v)
3088 || DECL_INITIAL (v) == error_mark_node)
3089 return NULL_TREE;
3090 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3091 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3092 offset += token * size;
/* Look the slot up in the vtable's constant initializer.  */
3093 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3094 offset, size);
3095 if (!fn)
3096 return NULL_TREE;
3097 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3098 || TREE_CODE (fn) == FDESC_EXPR);
3099 fn = TREE_OPERAND (fn, 0);
3100 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3102 /* When cgraph node is missing and function is not public, we cannot
3103 devirtualize. This can happen in WHOPR when the actual method
3104 ends up in other partition, because we found devirtualization
3105 possibility too late. */
3106 if (!can_refer_decl_in_current_unit_p (fn))
3107 return NULL_TREE;
3109 return fn;
3112 /* Return true iff VAL is a gimple expression that is known to be
3113 non-negative. Restricted to floating-point inputs. */
3115 bool
3116 gimple_val_nonnegative_real_p (tree val)
3118 gimple def_stmt;
3120 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3122 /* Use existing logic for non-gimple trees. */
3123 if (tree_expr_nonnegative_p (val))
3124 return true;
3126 if (TREE_CODE (val) != SSA_NAME)
3127 return false;
3129 /* Currently we look only at the immediately defining statement
3130 to make this determination, since recursion on defining
3131 statements of operands can lead to quadratic behavior in the
3132 worst case. This is expected to catch almost all occurrences
3133 in practice. It would be possible to implement limited-depth
3134 recursion if important cases are lost. Alternatively, passes
3135 that need this information (such as the pow/powi lowering code
3136 in the cse_sincos pass) could be revised to provide it through
3137 dataflow propagation. */
3139 def_stmt = SSA_NAME_DEF_STMT (val);
3141 if (is_gimple_assign (def_stmt))
3143 tree op0, op1;
3145 /* See fold-const.c:tree_expr_nonnegative_p for additional
3146 cases that could be handled with recursion. */
3148 switch (gimple_assign_rhs_code (def_stmt))
3150 case ABS_EXPR:
3151 /* Always true for floating-point operands. */
3152 return true;
3154 case MULT_EXPR:
3155 /* True if the two operands are identical (since we are
3156 restricted to floating-point inputs). */
3157 op0 = gimple_assign_rhs1 (def_stmt);
3158 op1 = gimple_assign_rhs2 (def_stmt);
3160 if (op0 == op1
3161 || operand_equal_p (op0, op1, 0))
3162 return true;
/* Fall through: a MULT with differing operands tells us nothing.  */
3164 default:
3165 return false;
3168 else if (is_gimple_call (def_stmt))
3170 tree fndecl = gimple_call_fndecl (def_stmt);
3171 if (fndecl
3172 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3174 tree arg1;
3176 switch (DECL_FUNCTION_CODE (fndecl))
3178 CASE_FLT_FN (BUILT_IN_ACOS):
3179 CASE_FLT_FN (BUILT_IN_ACOSH):
3180 CASE_FLT_FN (BUILT_IN_CABS):
3181 CASE_FLT_FN (BUILT_IN_COSH):
3182 CASE_FLT_FN (BUILT_IN_ERFC):
3183 CASE_FLT_FN (BUILT_IN_EXP):
3184 CASE_FLT_FN (BUILT_IN_EXP10):
3185 CASE_FLT_FN (BUILT_IN_EXP2):
3186 CASE_FLT_FN (BUILT_IN_FABS):
3187 CASE_FLT_FN (BUILT_IN_FDIM):
3188 CASE_FLT_FN (BUILT_IN_HYPOT):
3189 CASE_FLT_FN (BUILT_IN_POW10):
3190 return true;
3192 CASE_FLT_FN (BUILT_IN_SQRT):
3193 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3194 nonnegative inputs. */
3195 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3196 return true;
3198 break;
3200 CASE_FLT_FN (BUILT_IN_POWI):
3201 /* True if the second argument is an even integer. */
3202 arg1 = gimple_call_arg (def_stmt, 1);
3204 if (TREE_CODE (arg1) == INTEGER_CST
3205 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3206 return true;
3208 break;
3210 CASE_FLT_FN (BUILT_IN_POW):
3211 /* True if the second argument is an even integer-valued
3212 real. */
3213 arg1 = gimple_call_arg (def_stmt, 1);
3215 if (TREE_CODE (arg1) == REAL_CST)
3217 REAL_VALUE_TYPE c;
3218 HOST_WIDE_INT n;
3220 c = TREE_REAL_CST (arg1);
3221 n = real_to_integer (&c);
/* Only an exactly-representable even integer exponent counts;
   verify the round-trip through real_from_integer.  */
3223 if ((n & 1) == 0)
3225 REAL_VALUE_TYPE cint;
3226 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3227 if (real_identical (&c, &cint))
3228 return true;
3232 break;
3234 default:
3235 return false;
3240 return false;