2011-12-09 François Dumont <fdumont@gcc.gnu.org>
[official-gcc.git] / gcc / gimple-fold.c
blob5da9be37575f8bb9dee8be4c5f4732cdd4bcb693
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
33 #include "gimple-fold.h"
/* Return true when DECL can be referenced from the current unit.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  /* Non-static, non-external declarations belong to this unit.  */
  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return true;
  /* External flag is set, so we deal with C++ reference
     to static object from other file.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
    {
      /* Just be sure it is not big in frontend setting
	 flags incorrectly.  Those variables should never
	 be finalized.  */
      gcc_checking_assert (!(vnode = varpool_get_node (decl))
			   || !vnode->finalized);
      return false;
    }
  /* When function is public, we always can introduce a new reference.
     Exception are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.
     ??? as observed in PR20991 for already optimized out comdat virtual
     functions we may not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
    return true;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we didn't take
	 the decision to eliminate the offline copy of the function yet.
	 The second is important when devirtualization happens during final
	 compilation stage when making a new reference no longer makes callee
	 to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
	return false;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
	return false;
    }
  return true;
}
/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   Returns the canonicalized value, or NULL_TREE if CVAL refers to a decl
   that cannot be referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval)
{
  STRIP_NOPS (cval);
  /* Canonicalize PTR + CST into &MEM[ptr + cst].  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));

      /* Give up on addresses of decls we may not reference from here
	 (see can_refer_decl_in_current_unit_p for the reasons).  */
      if (base
	  && (TREE_CODE (base) == VAR_DECL
	      || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base))
	return NULL_TREE;
      if (base && TREE_CODE (base) == VAR_DECL)
	{
	  /* Taking the address makes the variable addressable.  */
	  TREE_ADDRESSABLE (base) = 1;
	  if (cfun && gimple_referenced_vars (cfun))
	    add_referenced_var (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
    }
  return cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  val = canonicalize_constructor_val (val);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.
   Returns the simplified reference, or NULL_TREE if no simplification
   was possible.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  /* Fold unary component extraction of constants directly.  */
  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  /* Find the innermost base of the handled-component chain.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  /* Retry the whole simplification on the canonicalized tree.  */
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  /* Constant folding of loads is only valid for rvalues.  */
  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					(TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try folding memory references on the rhs.  */
	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return unshare_expr (get_symbol_constant_value (rhs));

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      /* Only install the folded form when it is still a valid GIMPLE
	 condition.  */
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc();
  struct gimplify_ctx gctx;
  gimple last;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (&gctx);
  gctx.into_ssa = gimple_in_ssa_p (cfun);

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_remove (si_p, true);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary and assign it to the call's lhs.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if (gimple_assign_single_p (new_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	{
	  tree vdef;
	  /* The last store in the sequence reuses the original
	     statement's VDEF; earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  last = NULL;
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      /* Do not insert the last stmt in this loop but remember it
	 for replacing the original statement.  */
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      new_stmt = gsi_stmt (i);
      /* The replacement can expose previously unreferenced variables.  */
      if (gimple_in_ssa_p (cfun))
	find_new_referenced_vars (new_stmt);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
      last = new_stmt;
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the last.  */
  gsi_replace (si_p, last, false);
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* A COND_EXPR has a known length only when both arms agree.  */
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* For maximum value, ARG itself must be a non-negative
	     integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* Keep the maximum of all lengths seen so far.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* For exact length (type == 0) all values must agree.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.
     ARG_IDX selects the argument whose length/value we need;
     TYPE selects what get_maxval_strlen computes for it (0 = exact
     string length, 1 = maximum string length, 2 = maximum value).  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Generate code adjusting the first parameter of a call statement determined
   by GSI by DELTA.  The call's first argument must be a pointer ("this" for
   thunk-style adjustments); the adjusted value replaces it in-place.  */

void
gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
{
  gimple call_stmt = gsi_stmt (*gsi);
  tree parm, tmp;
  gimple new_stmt;

  delta = convert_to_ptrofftype (delta);
  gcc_assert (gimple_call_num_args (call_stmt) >= 1);
  parm = gimple_call_arg (call_stmt, 0);
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
  tmp = create_tmp_var (TREE_TYPE (parm), NULL);
  add_referenced_var (tmp);

  /* tmp = parm + delta, inserted right before the call.  */
  tmp = make_ssa_name (tmp, NULL);
  new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
  SSA_NAME_DEF_STMT (tmp) = new_stmt;
  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
  gimple_call_set_arg (call_stmt, 0, tmp);
}
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  /* Only a complete, constant-size access of a DECL base is usable.  */
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      /* Locate the field of TYPE containing bit position OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_low_cst (DECL_SIZE (fld), 1);
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  else
    return binfo;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   If INPLACE is true, only transformations that keep the statement in
   place are performed (no builtin expansion that may grow the IL).  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else
	{
	  /* Try to devirtualize based on the static type of the object.  */
	  tree obj = OBJ_TYPE_REF_OBJECT (callee);
	  tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
	  if (binfo)
	    {
	      HOST_WIDE_INT token
		= TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
	      tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
	      if (fndecl)
		{
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		}
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
1131 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1132 distinguishes both cases. */
1134 static bool
1135 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1137 bool changed = false;
1138 gimple stmt = gsi_stmt (*gsi);
1139 unsigned i;
1140 gimple_stmt_iterator gsinext = *gsi;
1141 gimple next_stmt;
     /* Remember the statement following STMT so that, after folding,
        we can tell whether STMT folded away entirely.  */
1143 gsi_next (&gsinext);
1144 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1146 /* Fold the main computation performed by the statement. */
1147 switch (gimple_code (stmt))
1149 case GIMPLE_ASSIGN:
1151 unsigned old_num_ops = gimple_num_ops (stmt);
1152 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1153 tree lhs = gimple_assign_lhs (stmt);
1154 tree new_rhs;
1155 /* First canonicalize operand order. This avoids building new
1156 trees if this is the only thing fold would later do. */
1157 if ((commutative_tree_code (subcode)
1158 || commutative_ternary_tree_code (subcode))
1159 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1160 gimple_assign_rhs2 (stmt), false))
1162 tree tem = gimple_assign_rhs1 (stmt);
1163 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1164 gimple_assign_set_rhs2 (stmt, tem);
1165 changed = true;
1167 new_rhs = fold_gimple_assign (gsi);
1168 if (new_rhs
1169 && !useless_type_conversion_p (TREE_TYPE (lhs),
1170 TREE_TYPE (new_rhs)))
1171 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
     /* In-place folding may only shrink the statement, never grow it.  */
1172 if (new_rhs
1173 && (!inplace
1174 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1176 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1177 changed = true;
1179 break;
1182 case GIMPLE_COND:
1183 changed |= fold_gimple_cond (stmt);
1184 break;
1186 case GIMPLE_CALL:
1187 changed |= gimple_fold_call (gsi, inplace);
1188 break;
1190 case GIMPLE_ASM:
1191 /* Fold *& in asm operands. */
1193 size_t noutputs;
1194 const char **oconstraints;
1195 const char *constraint;
1196 bool allows_mem, allows_reg;
1198 noutputs = gimple_asm_noutputs (stmt);
1199 oconstraints = XALLOCAVEC (const char *, noutputs);
1201 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1203 tree link = gimple_asm_output_op (stmt, i);
1204 tree op = TREE_VALUE (link);
1205 oconstraints[i]
1206 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1207 if (REFERENCE_CLASS_P (op)
1208 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1210 TREE_VALUE (link) = op;
1211 changed = true;
1214 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1216 tree link = gimple_asm_input_op (stmt, i);
1217 tree op = TREE_VALUE (link);
1218 constraint
1219 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1220 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1221 oconstraints, &allows_mem, &allows_reg);
1222 if (REFERENCE_CLASS_P (op)
1223 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1224 != NULL_TREE)
1226 TREE_VALUE (link) = op;
1227 changed = true;
1231 break;
1233 case GIMPLE_DEBUG:
1234 if (gimple_debug_bind_p (stmt))
1236 tree val = gimple_debug_bind_get_value (stmt);
1237 if (val
1238 && REFERENCE_CLASS_P (val))
1240 tree tem = maybe_fold_reference (val, false);
1241 if (tem)
1243 gimple_debug_bind_set_value (stmt, tem);
1244 changed = true;
1248 break;
1250 default:;
1253 /* If stmt folds into nothing and it was the last stmt in a bb,
1254 don't call gsi_stmt. */
1255 if (gsi_end_p (*gsi))
1257 gcc_assert (next_stmt == NULL);
1258 return changed;
1261 stmt = gsi_stmt (*gsi);
1263 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1264 as we'd be changing the next stmt. */
1265 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1267 tree lhs = gimple_get_lhs (stmt);
1268 if (lhs && REFERENCE_CLASS_P (lhs))
1270 tree new_lhs = maybe_fold_reference (lhs, true);
1271 if (new_lhs)
1273 gimple_set_lhs (stmt, new_lhs);
1274 changed = true;
1279 return changed;
1282 /* Fold the statement pointed to by GSI. In some cases, this function may
1283 replace the whole statement with a new one. Returns true iff folding
1284 makes any changes.
1285 The statement pointed to by GSI should be in valid gimple form but may
1286 be in unfolded state as resulting from for example constant propagation
1287 which can produce *&x = 0. */
1289 bool
1290 fold_stmt (gimple_stmt_iterator *gsi)
     /* INPLACE == false: the worker is allowed to replace the statement.  */
1292 return fold_stmt_1 (gsi, false);
1295 /* Perform the minimal folding on statement *GSI. Only operations like
1296 *&x created by constant propagation are handled. The statement cannot
1297 be replaced with a new one. Return true if the statement was
1298 changed, false otherwise.
1299 The statement *GSI should be in valid gimple form but may
1300 be in unfolded state as resulting from for example constant propagation
1301 which can produce *&x = 0. */
1303 bool
1304 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1306 gimple stmt = gsi_stmt (*gsi);
1307 bool changed = fold_stmt_1 (gsi, true);
     /* With INPLACE == true the worker must never have replaced STMT.  */
1308 gcc_assert (gsi_stmt (*gsi) == stmt);
1309 return changed;
1312 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1313 if EXPR is null or we don't know how.
1314 If non-null, the result always has boolean type. */
1316 static tree
1317 canonicalize_bool (tree expr, bool invert)
1319 if (!expr)
1320 return NULL_TREE;
     /* Inverted case: fold the logical negation into the result directly.  */
1321 else if (invert)
1323 if (integer_nonzerop (expr))
1324 return boolean_false_node;
1325 else if (integer_zerop (expr))
1326 return boolean_true_node;
     /* An SSA name becomes an explicit comparison against zero.  */
1327 else if (TREE_CODE (expr) == SSA_NAME)
1328 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1329 build_int_cst (TREE_TYPE (expr), 0));
1330 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1331 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1332 boolean_type_node,
1333 TREE_OPERAND (expr, 0),
1334 TREE_OPERAND (expr, 1));
1335 else
1336 return NULL_TREE;
     /* Non-inverted case: only the type may need canonicalizing.  */
1338 else
1340 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1341 return expr;
1342 if (integer_nonzerop (expr))
1343 return boolean_true_node;
1344 else if (integer_zerop (expr))
1345 return boolean_false_node;
1346 else if (TREE_CODE (expr) == SSA_NAME)
1347 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1348 build_int_cst (TREE_TYPE (expr), 0));
1349 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1350 return fold_build2 (TREE_CODE (expr),
1351 boolean_type_node,
1352 TREE_OPERAND (expr, 0),
1353 TREE_OPERAND (expr, 1));
1354 else
1355 return NULL_TREE;
1359 /* Check to see if a boolean expression EXPR is logically equivalent to the
1360 comparison (OP1 CODE OP2). Check for various identities involving
1361 SSA_NAMEs. */
1363 static bool
1364 same_bool_comparison_p (const_tree expr, enum tree_code code,
1365 const_tree op1, const_tree op2)
1367 gimple s;
1369 /* The obvious case. */
1370 if (TREE_CODE (expr) == code
1371 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1372 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1373 return true;
1375 /* Check for comparing (name, name != 0) and the case where expr
1376 is an SSA_NAME with a definition matching the comparison. */
1377 if (TREE_CODE (expr) == SSA_NAME
1378 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1380 if (operand_equal_p (expr, op1, 0))
1381 return ((code == NE_EXPR && integer_zerop (op2))
1382 || (code == EQ_EXPR && integer_nonzerop (op2)));
     /* Compare EXPR's defining assignment against (OP1 CODE OP2).  */
1383 s = SSA_NAME_DEF_STMT (expr);
1384 if (is_gimple_assign (s)
1385 && gimple_assign_rhs_code (s) == code
1386 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1387 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1388 return true;
1391 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1392 of name is a comparison, recurse. */
1393 if (TREE_CODE (op1) == SSA_NAME
1394 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1396 s = SSA_NAME_DEF_STMT (op1);
1397 if (is_gimple_assign (s)
1398 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1400 enum tree_code c = gimple_assign_rhs_code (s);
     /* (name != 0) / (name == 1): same truth value as the definition.  */
1401 if ((c == NE_EXPR && integer_zerop (op2))
1402 || (c == EQ_EXPR && integer_nonzerop (op2)))
1403 return same_bool_comparison_p (expr, c,
1404 gimple_assign_rhs1 (s),
1405 gimple_assign_rhs2 (s));
     /* (name == 0) / (name != 1): inverted truth value.  */
1406 if ((c == EQ_EXPR && integer_zerop (op2))
1407 || (c == NE_EXPR && integer_nonzerop (op2)))
1408 return same_bool_comparison_p (expr,
1409 invert_tree_comparison (c, false),
1410 gimple_assign_rhs1 (s),
1411 gimple_assign_rhs2 (s));
1414 return false;
1417 /* Check to see if two boolean expressions OP1 and OP2 are logically
1418 equivalent. */
1420 static bool
1421 same_bool_result_p (const_tree op1, const_tree op2)
1423 /* Simple cases first. */
1424 if (operand_equal_p (op1, op2, 0))
1425 return true;
1427 /* Check the cases where at least one of the operands is a comparison.
1428 These are a bit smarter than operand_equal_p in that they apply some
1429 identities on SSA_NAMEs. */
1430 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1431 && same_bool_comparison_p (op1, TREE_CODE (op2),
1432 TREE_OPERAND (op2, 0),
1433 TREE_OPERAND (op2, 1)))
1434 return true;
1435 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1436 && same_bool_comparison_p (op2, TREE_CODE (op1),
1437 TREE_OPERAND (op1, 0),
1438 TREE_OPERAND (op1, 1)))
1439 return true;
1441 /* Default case. */
1442 return false;
1445 /* Forward declarations for some mutually recursive functions. */
1447 static tree
1448 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1449 enum tree_code code2, tree op2a, tree op2b);
1450 static tree
1451 and_var_with_comparison (tree var, bool invert,
1452 enum tree_code code2, tree op2a, tree op2b);
1453 static tree
1454 and_var_with_comparison_1 (gimple stmt,
1455 enum tree_code code2, tree op2a, tree op2b);
1456 static tree
1457 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1458 enum tree_code code2, tree op2a, tree op2b);
1459 static tree
1460 or_var_with_comparison (tree var, bool invert,
1461 enum tree_code code2, tree op2a, tree op2b);
1462 static tree
1463 or_var_with_comparison_1 (gimple stmt,
1464 enum tree_code code2, tree op2a, tree op2b);
1466 /* Helper function for and_comparisons_1: try to simplify the AND of the
1467 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1468 If INVERT is true, invert the value of the VAR before doing the AND.
1469 Return NULL_EXPR if we can't simplify this to a single expression. */
1471 static tree
1472 and_var_with_comparison (tree var, bool invert,
1473 enum tree_code code2, tree op2a, tree op2b)
1475 tree t;
1476 gimple stmt = SSA_NAME_DEF_STMT (var);
1478 /* We can only deal with variables whose definitions are assignments. */
1479 if (!is_gimple_assign (stmt))
1480 return NULL_TREE;
1482 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1483 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1484 Then we only have to consider the simpler non-inverted cases. */
1485 if (invert)
1486 t = or_var_with_comparison_1 (stmt,
1487 invert_tree_comparison (code2, false),
1488 op2a, op2b);
1489 else
1490 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
     /* Canonicalize to boolean type, undoing the DeMorgan inversion.  */
1491 return canonicalize_bool (t, invert);
1494 /* Try to simplify the AND of the ssa variable defined by the assignment
1495 STMT with the comparison specified by (OP2A CODE2 OP2B).
1496 Return NULL_EXPR if we can't simplify this to a single expression. */
1498 static tree
1499 and_var_with_comparison_1 (gimple stmt,
1500 enum tree_code code2, tree op2a, tree op2b)
1502 tree var = gimple_assign_lhs (stmt);
1503 tree true_test_var = NULL_TREE;
1504 tree false_test_var = NULL_TREE;
1505 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1507 /* Check for identities like (var AND (var == 0)) => false. */
1508 if (TREE_CODE (op2a) == SSA_NAME
1509 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
     /* (op2a != 0) / (op2a == 1) tests op2a's truth directly.  */
1511 if ((code2 == NE_EXPR && integer_zerop (op2b))
1512 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1514 true_test_var = op2a;
1515 if (var == true_test_var)
1516 return var;
     /* (op2a == 0) / (op2a != 1) tests op2a's falsehood.  */
1518 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1519 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1521 false_test_var = op2a;
1522 if (var == false_test_var)
1523 return boolean_false_node;
1527 /* If the definition is a comparison, recurse on it. */
1528 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1530 tree t = and_comparisons_1 (innercode,
1531 gimple_assign_rhs1 (stmt),
1532 gimple_assign_rhs2 (stmt),
1533 code2,
1534 op2a,
1535 op2b);
1536 if (t)
1537 return t;
1540 /* If the definition is an AND or OR expression, we may be able to
1541 simplify by reassociating. */
1542 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1543 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1545 tree inner1 = gimple_assign_rhs1 (stmt);
1546 tree inner2 = gimple_assign_rhs2 (stmt);
1547 gimple s;
1548 tree t;
1549 tree partial = NULL_TREE;
1550 bool is_and = (innercode == BIT_AND_EXPR);
1552 /* Check for boolean identities that don't require recursive examination
1553 of inner1/inner2:
1554 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1555 inner1 AND (inner1 OR inner2) => inner1
1556 !inner1 AND (inner1 AND inner2) => false
1557 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1558 Likewise for similar cases involving inner2. */
1559 if (inner1 == true_test_var)
1560 return (is_and ? var : inner1);
1561 else if (inner2 == true_test_var)
1562 return (is_and ? var : inner2);
1563 else if (inner1 == false_test_var)
1564 return (is_and
1565 ? boolean_false_node
1566 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1567 else if (inner2 == false_test_var)
1568 return (is_and
1569 ? boolean_false_node
1570 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1572 /* Next, redistribute/reassociate the AND across the inner tests.
1573 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1574 if (TREE_CODE (inner1) == SSA_NAME
1575 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1576 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1577 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1578 gimple_assign_rhs1 (s),
1579 gimple_assign_rhs2 (s),
1580 code2, op2a, op2b)))
1582 /* Handle the AND case, where we are reassociating:
1583 (inner1 AND inner2) AND (op2a code2 op2b)
1584 => (t AND inner2)
1585 If the partial result t is a constant, we win. Otherwise
1586 continue on to try reassociating with the other inner test. */
1587 if (is_and)
1589 if (integer_onep (t))
1590 return inner2;
1591 else if (integer_zerop (t))
1592 return boolean_false_node;
1595 /* Handle the OR case, where we are redistributing:
1596 (inner1 OR inner2) AND (op2a code2 op2b)
1597 => (t OR (inner2 AND (op2a code2 op2b))) */
1598 else if (integer_onep (t))
1599 return boolean_true_node;
1601 /* Save partial result for later. */
1602 partial = t;
1605 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1606 if (TREE_CODE (inner2) == SSA_NAME
1607 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1608 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1609 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1610 gimple_assign_rhs1 (s),
1611 gimple_assign_rhs2 (s),
1612 code2, op2a, op2b)))
1614 /* Handle the AND case, where we are reassociating:
1615 (inner1 AND inner2) AND (op2a code2 op2b)
1616 => (inner1 AND t) */
1617 if (is_and)
1619 if (integer_onep (t))
1620 return inner1;
1621 else if (integer_zerop (t))
1622 return boolean_false_node;
1623 /* If both are the same, we can apply the identity
1624 (x AND x) == x. */
1625 else if (partial && same_bool_result_p (t, partial))
1626 return t;
1629 /* Handle the OR case, where we are redistributing:
1630 (inner1 OR inner2) AND (op2a code2 op2b)
1631 => (t OR (inner1 AND (op2a code2 op2b)))
1632 => (t OR partial) */
1633 else
1635 if (integer_onep (t))
1636 return boolean_true_node;
1637 else if (partial)
1639 /* We already got a simplification for the other
1640 operand to the redistributed OR expression. The
1641 interesting case is when at least one is false.
1642 Or, if both are the same, we can apply the identity
1643 (x OR x) == x. */
1644 if (integer_zerop (partial))
1645 return t;
1646 else if (integer_zerop (t))
1647 return partial;
1648 else if (same_bool_result_p (t, partial))
1649 return t;
1654 return NULL_TREE;
1657 /* Try to simplify the AND of two comparisons defined by
1658 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1659 If this can be done without constructing an intermediate value,
1660 return the resulting tree; otherwise NULL_TREE is returned.
1661 This function is deliberately asymmetric as it recurses on SSA_DEFs
1662 in the first comparison but not the second. */
1664 static tree
1665 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1666 enum tree_code code2, tree op2a, tree op2b)
1668 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1669 if (operand_equal_p (op1a, op2a, 0)
1670 && operand_equal_p (op1b, op2b, 0))
1672 /* Result will be either NULL_TREE, or a combined comparison. */
1673 tree t = combine_comparisons (UNKNOWN_LOCATION,
1674 TRUTH_ANDIF_EXPR, code1, code2,
1675 boolean_type_node, op1a, op1b);
1676 if (t)
1677 return t;
1680 /* Likewise the swapped case of the above. */
1681 if (operand_equal_p (op1a, op2b, 0)
1682 && operand_equal_p (op1b, op2a, 0))
1684 /* Result will be either NULL_TREE, or a combined comparison. */
1685 tree t = combine_comparisons (UNKNOWN_LOCATION,
1686 TRUTH_ANDIF_EXPR, code1,
1687 swap_tree_comparison (code2),
1688 boolean_type_node, op1a, op1b);
1689 if (t)
1690 return t;
1693 /* If both comparisons are of the same value against constants, we might
1694 be able to merge them. */
1695 if (operand_equal_p (op1a, op2a, 0)
1696 && TREE_CODE (op1b) == INTEGER_CST
1697 && TREE_CODE (op2b) == INTEGER_CST)
1699 int cmp = tree_int_cst_compare (op1b, op2b);
1701 /* If we have (op1a == op1b), we should either be able to
1702 return that or FALSE, depending on whether the constant op1b
1703 also satisfies the other comparison against op2b. */
1704 if (code1 == EQ_EXPR)
1706 bool done = true;
1707 bool val;
1708 switch (code2)
1710 case EQ_EXPR: val = (cmp == 0); break;
1711 case NE_EXPR: val = (cmp != 0); break;
1712 case LT_EXPR: val = (cmp < 0); break;
1713 case GT_EXPR: val = (cmp > 0); break;
1714 case LE_EXPR: val = (cmp <= 0); break;
1715 case GE_EXPR: val = (cmp >= 0); break;
1716 default: done = false;
1718 if (done)
1720 if (val)
1721 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1722 else
1723 return boolean_false_node;
1726 /* Likewise if the second comparison is an == comparison. */
1727 else if (code2 == EQ_EXPR)
1729 bool done = true;
1730 bool val;
1731 switch (code1)
1733 case EQ_EXPR: val = (cmp == 0); break;
1734 case NE_EXPR: val = (cmp != 0); break;
1735 case LT_EXPR: val = (cmp > 0); break;
1736 case GT_EXPR: val = (cmp < 0); break;
1737 case LE_EXPR: val = (cmp >= 0); break;
1738 case GE_EXPR: val = (cmp <= 0); break;
1739 default: done = false;
1741 if (done)
1743 if (val)
1744 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1745 else
1746 return boolean_false_node;
1750 /* Same business with inequality tests. */
1751 else if (code1 == NE_EXPR)
1753 bool val;
1754 switch (code2)
1756 case EQ_EXPR: val = (cmp != 0); break;
1757 case NE_EXPR: val = (cmp == 0); break;
1758 case LT_EXPR: val = (cmp >= 0); break;
1759 case GT_EXPR: val = (cmp <= 0); break;
1760 case LE_EXPR: val = (cmp > 0); break;
1761 case GE_EXPR: val = (cmp < 0); break;
1762 default:
1763 val = false;
     /* VAL set means the NE test is implied by the second comparison,
        so the AND reduces to the second comparison alone.  */
1765 if (val)
1766 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1768 else if (code2 == NE_EXPR)
1770 bool val;
1771 switch (code1)
1773 case EQ_EXPR: val = (cmp == 0); break;
1774 case NE_EXPR: val = (cmp != 0); break;
1775 case LT_EXPR: val = (cmp <= 0); break;
1776 case GT_EXPR: val = (cmp >= 0); break;
1777 case LE_EXPR: val = (cmp < 0); break;
1778 case GE_EXPR: val = (cmp > 0); break;
1779 default:
1780 val = false;
1782 if (val)
1783 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1786 /* Choose the more restrictive of two < or <= comparisons. */
1787 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1788 && (code2 == LT_EXPR || code2 == LE_EXPR))
1790 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1791 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1792 else
1793 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1796 /* Likewise choose the more restrictive of two > or >= comparisons. */
1797 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1798 && (code2 == GT_EXPR || code2 == GE_EXPR))
1800 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1801 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1802 else
1803 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1806 /* Check for singleton ranges. */
1807 else if (cmp == 0
1808 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1809 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1810 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1812 /* Check for disjoint ranges. */
1813 else if (cmp <= 0
1814 && (code1 == LT_EXPR || code1 == LE_EXPR)
1815 && (code2 == GT_EXPR || code2 == GE_EXPR))
1816 return boolean_false_node;
1817 else if (cmp >= 0
1818 && (code1 == GT_EXPR || code1 == GE_EXPR)
1819 && (code2 == LT_EXPR || code2 == LE_EXPR))
1820 return boolean_false_node;
1823 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1824 NAME's definition is a truth value. See if there are any simplifications
1825 that can be done against the NAME's definition. */
1826 if (TREE_CODE (op1a) == SSA_NAME
1827 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1828 && (integer_zerop (op1b) || integer_onep (op1b)))
1830 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1831 || (code1 == NE_EXPR && integer_onep (op1b)));
1832 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1833 switch (gimple_code (stmt))
1835 case GIMPLE_ASSIGN:
1836 /* Try to simplify by copy-propagating the definition. */
1837 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1839 case GIMPLE_PHI:
1840 /* If every argument to the PHI produces the same result when
1841 ANDed with the second comparison, we win.
1842 Do not do this unless the type is bool since we need a bool
1843 result here anyway. */
1844 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1846 tree result = NULL_TREE;
1847 unsigned i;
1848 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1850 tree arg = gimple_phi_arg_def (stmt, i);
1852 /* If this PHI has itself as an argument, ignore it.
1853 If all the other args produce the same result,
1854 we're still OK. */
1855 if (arg == gimple_phi_result (stmt))
1856 continue;
1857 else if (TREE_CODE (arg) == INTEGER_CST)
1859 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1861 if (!result)
1862 result = boolean_false_node;
1863 else if (!integer_zerop (result))
1864 return NULL_TREE;
1866 else if (!result)
1867 result = fold_build2 (code2, boolean_type_node,
1868 op2a, op2b);
1869 else if (!same_bool_comparison_p (result,
1870 code2, op2a, op2b))
1871 return NULL_TREE;
1873 else if (TREE_CODE (arg) == SSA_NAME
1874 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1876 tree temp;
1877 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1878 /* In simple cases we can look through PHI nodes,
1879 but we have to be careful with loops.
1880 See PR49073. */
1881 if (! dom_info_available_p (CDI_DOMINATORS)
1882 || gimple_bb (def_stmt) == gimple_bb (stmt)
1883 || dominated_by_p (CDI_DOMINATORS,
1884 gimple_bb (def_stmt),
1885 gimple_bb (stmt)))
1886 return NULL_TREE;
1887 temp = and_var_with_comparison (arg, invert, code2,
1888 op2a, op2b);
1889 if (!temp)
1890 return NULL_TREE;
1891 else if (!result)
1892 result = temp;
1893 else if (!same_bool_result_p (result, temp))
1894 return NULL_TREE;
1896 else
1897 return NULL_TREE;
1899 return result;
1902 default:
1903 break;
1906 return NULL_TREE;
1909 /* Try to simplify the AND of two comparisons, specified by
1910 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1911 If this can be simplified to a single expression (without requiring
1912 introducing more SSA variables to hold intermediate values),
1913 return the resulting tree. Otherwise return NULL_TREE.
1914 If the result expression is non-null, it has boolean type. */
1916 tree
1917 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1918 enum tree_code code2, tree op2a, tree op2b)
1920 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1921 if (t)
1922 return t;
1923 else
     /* The helper is deliberately asymmetric, so retry with the two
        comparisons swapped.  */
1924 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1927 /* Helper function for or_comparisons_1: try to simplify the OR of the
1928 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1929 If INVERT is true, invert the value of VAR before doing the OR.
1930 Return NULL_EXPR if we can't simplify this to a single expression. */
1932 static tree
1933 or_var_with_comparison (tree var, bool invert,
1934 enum tree_code code2, tree op2a, tree op2b)
1936 tree t;
1937 gimple stmt = SSA_NAME_DEF_STMT (var);
1939 /* We can only deal with variables whose definitions are assignments. */
1940 if (!is_gimple_assign (stmt))
1941 return NULL_TREE;
1943 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1944 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1945 Then we only have to consider the simpler non-inverted cases. */
1946 if (invert)
1947 t = and_var_with_comparison_1 (stmt,
1948 invert_tree_comparison (code2, false),
1949 op2a, op2b);
1950 else
1951 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
     /* Canonicalize to boolean type, undoing the DeMorgan inversion.  */
1952 return canonicalize_bool (t, invert);
1955 /* Try to simplify the OR of the ssa variable defined by the assignment
1956 STMT with the comparison specified by (OP2A CODE2 OP2B).
1957 Return NULL_EXPR if we can't simplify this to a single expression. */
1959 static tree
1960 or_var_with_comparison_1 (gimple stmt,
1961 enum tree_code code2, tree op2a, tree op2b)
1963 tree var = gimple_assign_lhs (stmt);
1964 tree true_test_var = NULL_TREE;
1965 tree false_test_var = NULL_TREE;
1966 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1968 /* Check for identities like (var OR (var != 0)) => true. */
1969 if (TREE_CODE (op2a) == SSA_NAME
1970 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
     /* (op2a != 0) / (op2a == 1) tests op2a's truth directly.  */
1972 if ((code2 == NE_EXPR && integer_zerop (op2b))
1973 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1975 true_test_var = op2a;
1976 if (var == true_test_var)
1977 return var;
     /* (op2a == 0) / (op2a != 1) tests op2a's falsehood.  */
1979 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1980 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1982 false_test_var = op2a;
1983 if (var == false_test_var)
1984 return boolean_true_node;
1988 /* If the definition is a comparison, recurse on it. */
1989 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1991 tree t = or_comparisons_1 (innercode,
1992 gimple_assign_rhs1 (stmt),
1993 gimple_assign_rhs2 (stmt),
1994 code2,
1995 op2a,
1996 op2b);
1997 if (t)
1998 return t;
2001 /* If the definition is an AND or OR expression, we may be able to
2002 simplify by reassociating. */
2003 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2004 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2006 tree inner1 = gimple_assign_rhs1 (stmt);
2007 tree inner2 = gimple_assign_rhs2 (stmt);
2008 gimple s;
2009 tree t;
2010 tree partial = NULL_TREE;
2011 bool is_or = (innercode == BIT_IOR_EXPR);
2013 /* Check for boolean identities that don't require recursive examination
2014 of inner1/inner2:
2015 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2016 inner1 OR (inner1 AND inner2) => inner1
2017 !inner1 OR (inner1 OR inner2) => true
2018 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
     Likewise for similar cases involving inner2. */
2020 if (inner1 == true_test_var)
2021 return (is_or ? var : inner1);
2022 else if (inner2 == true_test_var)
2023 return (is_or ? var : inner2);
2024 else if (inner1 == false_test_var)
2025 return (is_or
2026 ? boolean_true_node
2027 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2028 else if (inner2 == false_test_var)
2029 return (is_or
2030 ? boolean_true_node
2031 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2033 /* Next, redistribute/reassociate the OR across the inner tests.
2034 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2035 if (TREE_CODE (inner1) == SSA_NAME
2036 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2037 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2038 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2039 gimple_assign_rhs1 (s),
2040 gimple_assign_rhs2 (s),
2041 code2, op2a, op2b)))
2043 /* Handle the OR case, where we are reassociating:
2044 (inner1 OR inner2) OR (op2a code2 op2b)
2045 => (t OR inner2)
2046 If the partial result t is a constant, we win. Otherwise
2047 continue on to try reassociating with the other inner test. */
2048 if (is_or)
2050 if (integer_onep (t))
2051 return boolean_true_node;
2052 else if (integer_zerop (t))
2053 return inner2;
2056 /* Handle the AND case, where we are redistributing:
2057 (inner1 AND inner2) OR (op2a code2 op2b)
2058 => (t AND (inner2 OR (op2a code op2b))) */
2059 else if (integer_zerop (t))
2060 return boolean_false_node;
2062 /* Save partial result for later. */
2063 partial = t;
2066 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2067 if (TREE_CODE (inner2) == SSA_NAME
2068 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2069 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2070 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2071 gimple_assign_rhs1 (s),
2072 gimple_assign_rhs2 (s),
2073 code2, op2a, op2b)))
2075 /* Handle the OR case, where we are reassociating:
2076 (inner1 OR inner2) OR (op2a code2 op2b)
2077 => (inner1 OR t)
2078 => (t OR partial) */
2079 if (is_or)
2081 if (integer_zerop (t))
2082 return inner1;
2083 else if (integer_onep (t))
2084 return boolean_true_node;
2085 /* If both are the same, we can apply the identity
2086 (x OR x) == x. */
2087 else if (partial && same_bool_result_p (t, partial))
2088 return t;
2091 /* Handle the AND case, where we are redistributing:
2092 (inner1 AND inner2) OR (op2a code2 op2b)
2093 => (t AND (inner1 OR (op2a code2 op2b)))
2094 => (t AND partial) */
2095 else
2097 if (integer_zerop (t))
2098 return boolean_false_node;
2099 else if (partial)
2101 /* We already got a simplification for the other
2102 operand to the redistributed AND expression. The
2103 interesting case is when at least one is true.
2104 Or, if both are the same, we can apply the identity
2105 (x AND x) == x. */
2106 if (integer_onep (partial))
2107 return t;
2108 else if (integer_onep (t))
2109 return partial;
2110 else if (same_bool_result_p (t, partial))
2111 return t;
2116 return NULL_TREE;
2119 /* Try to simplify the OR of two comparisons defined by
2120 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2121 If this can be done without constructing an intermediate value,
2122 return the resulting tree; otherwise NULL_TREE is returned.
2123 This function is deliberately asymmetric as it recurses on SSA_DEFs
2124 in the first comparison but not the second. */
2126 static tree
2127 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2128 enum tree_code code2, tree op2a, tree op2b)
2130 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2131 if (operand_equal_p (op1a, op2a, 0)
2132 && operand_equal_p (op1b, op2b, 0))
2134 /* Result will be either NULL_TREE, or a combined comparison. */
2135 tree t = combine_comparisons (UNKNOWN_LOCATION,
2136 TRUTH_ORIF_EXPR, code1, code2,
2137 boolean_type_node, op1a, op1b);
2138 if (t)
2139 return t;
2142 /* Likewise the swapped case of the above. */
2143 if (operand_equal_p (op1a, op2b, 0)
2144 && operand_equal_p (op1b, op2a, 0))
2146 /* Result will be either NULL_TREE, or a combined comparison. */
2147 tree t = combine_comparisons (UNKNOWN_LOCATION,
2148 TRUTH_ORIF_EXPR, code1,
2149 swap_tree_comparison (code2),
2150 boolean_type_node, op1a, op1b);
2151 if (t)
2152 return t;
2155 /* If both comparisons are of the same value against constants, we might
2156 be able to merge them. */
2157 if (operand_equal_p (op1a, op2a, 0)
2158 && TREE_CODE (op1b) == INTEGER_CST
2159 && TREE_CODE (op2b) == INTEGER_CST)
      /* CMP is negative, zero, or positive according as op1b is less
	 than, equal to, or greater than op2b.  */
2161 int cmp = tree_int_cst_compare (op1b, op2b);
2163 /* If we have (op1a != op1b), we should either be able to
2164 return that or TRUE, depending on whether the constant op1b
2165 also satisfies the other comparison against op2b. */
2166 if (code1 == NE_EXPR)
2168 bool done = true;
2169 bool val;
2170 switch (code2)
2172 case EQ_EXPR: val = (cmp == 0); break;
2173 case NE_EXPR: val = (cmp != 0); break;
2174 case LT_EXPR: val = (cmp < 0); break;
2175 case GT_EXPR: val = (cmp > 0); break;
2176 case LE_EXPR: val = (cmp <= 0); break;
2177 case GE_EXPR: val = (cmp >= 0); break;
2178 default: done = false;
2180 if (done)
2182 if (val)
2183 return boolean_true_node;
2184 else
2185 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2188 /* Likewise if the second comparison is a != comparison. */
2189 else if (code2 == NE_EXPR)
2191 bool done = true;
2192 bool val;
2193 switch (code1)
2195 case EQ_EXPR: val = (cmp == 0); break;
2196 case NE_EXPR: val = (cmp != 0); break;
	      /* The sense of CMP flips here because we test whether the
		 constant op2b satisfies (x CODE1 op1b).  */
2197 case LT_EXPR: val = (cmp > 0); break;
2198 case GT_EXPR: val = (cmp < 0); break;
2199 case LE_EXPR: val = (cmp >= 0); break;
2200 case GE_EXPR: val = (cmp <= 0); break;
2201 default: done = false;
2203 if (done)
2205 if (val)
2206 return boolean_true_node;
2207 else
2208 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2212 /* See if an equality test is redundant with the other comparison. */
2213 else if (code1 == EQ_EXPR)
2215 bool val;
2216 switch (code2)
2218 case EQ_EXPR: val = (cmp == 0); break;
2219 case NE_EXPR: val = (cmp != 0); break;
2220 case LT_EXPR: val = (cmp < 0); break;
2221 case GT_EXPR: val = (cmp > 0); break;
2222 case LE_EXPR: val = (cmp <= 0); break;
2223 case GE_EXPR: val = (cmp >= 0); break;
2224 default:
2225 val = false;
2227 if (val)
2228 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2230 else if (code2 == EQ_EXPR)
2232 bool val;
2233 switch (code1)
2235 case EQ_EXPR: val = (cmp == 0); break;
2236 case NE_EXPR: val = (cmp != 0); break;
2237 case LT_EXPR: val = (cmp > 0); break;
2238 case GT_EXPR: val = (cmp < 0); break;
2239 case LE_EXPR: val = (cmp >= 0); break;
2240 case GE_EXPR: val = (cmp <= 0); break;
2241 default:
2242 val = false;
2244 if (val)
2245 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2248 /* Choose the less restrictive of two < or <= comparisons. */
2249 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2250 && (code2 == LT_EXPR || code2 == LE_EXPR))
2252 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2253 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2254 else
2255 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2258 /* Likewise choose the less restrictive of two > or >= comparisons. */
2259 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2260 && (code2 == GT_EXPR || code2 == GE_EXPR))
2262 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2263 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2264 else
2265 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2268 /* Check for singleton ranges. */
2269 else if (cmp == 0
2270 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2271 || (code1 == GT_EXPR && code2 == LT_EXPR)))
	/* (x < C) OR (x > C) simplifies to (x != C).  */
2272 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2274 /* Check for less/greater pairs that don't restrict the range at all. */
2275 else if (cmp >= 0
2276 && (code1 == LT_EXPR || code1 == LE_EXPR)
2277 && (code2 == GT_EXPR || code2 == GE_EXPR))
2278 return boolean_true_node;
2279 else if (cmp <= 0
2280 && (code1 == GT_EXPR || code1 == GE_EXPR)
2281 && (code2 == LT_EXPR || code2 == LE_EXPR))
2282 return boolean_true_node;
2285 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2286 NAME's definition is a truth value. See if there are any simplifications
2287 that can be done against the NAME's definition. */
2288 if (TREE_CODE (op1a) == SSA_NAME
2289 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2290 && (integer_zerop (op1b) || integer_onep (op1b)))
      /* INVERT is true when the first comparison tests that NAME is
	 false rather than true.  */
2292 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2293 || (code1 == NE_EXPR && integer_onep (op1b)));
2294 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2295 switch (gimple_code (stmt))
2297 case GIMPLE_ASSIGN:
2298 /* Try to simplify by copy-propagating the definition. */
2299 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2301 case GIMPLE_PHI:
2302 /* If every argument to the PHI produces the same result when
2303 ORed with the second comparison, we win.
2304 Do not do this unless the type is bool since we need a bool
2305 result here anyway. */
2306 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2308 tree result = NULL_TREE;
2309 unsigned i;
2310 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2312 tree arg = gimple_phi_arg_def (stmt, i);
2314 /* If this PHI has itself as an argument, ignore it.
2315 If all the other args produce the same result,
2316 we're still OK. */
2317 if (arg == gimple_phi_result (stmt))
2318 continue;
2319 else if (TREE_CODE (arg) == INTEGER_CST)
2321 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2323 if (!result)
2324 result = boolean_true_node;
2325 else if (!integer_onep (result))
2326 return NULL_TREE;
2328 else if (!result)
2329 result = fold_build2 (code2, boolean_type_node,
2330 op2a, op2b);
2331 else if (!same_bool_comparison_p (result,
2332 code2, op2a, op2b))
2333 return NULL_TREE;
2335 else if (TREE_CODE (arg) == SSA_NAME
2336 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2338 tree temp;
2339 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2340 /* In simple cases we can look through PHI nodes,
2341 but we have to be careful with loops.
2342 See PR49073. */
2343 if (! dom_info_available_p (CDI_DOMINATORS)
2344 || gimple_bb (def_stmt) == gimple_bb (stmt)
2345 || dominated_by_p (CDI_DOMINATORS,
2346 gimple_bb (def_stmt),
2347 gimple_bb (stmt)))
2348 return NULL_TREE;
2349 temp = or_var_with_comparison (arg, invert, code2,
2350 op2a, op2b)
2351 if (!temp)
2352 return NULL_TREE;
2353 else if (!result)
2354 result = temp;
2355 else if (!same_bool_result_p (result, temp))
2356 return NULL_TREE;
2358 else
2359 return NULL_TREE;
2361 return result;
2364 default:
2365 break;
2368 return NULL_TREE;
2371 /* Try to simplify the OR of two comparisons, specified by
2372 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2373 If this can be simplified to a single expression (without requiring
2374 introducing more SSA variables to hold intermediate values),
2375 return the resulting tree. Otherwise return NULL_TREE.
2376 If the result expression is non-null, it has boolean type. */
2378 tree
2379 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2380 enum tree_code code2, tree op2a, tree op2b)
2382 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2383 if (t)
2384 return t;
2385 else
2386 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2390 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2392 Either NULL_TREE, a simplified but non-constant or a constant
2393 is returned.
2395 ??? This should go into a gimple-fold-inline.h file to be eventually
2396 privatized with the single valueize function used in the various TUs
2397 to avoid the indirect function call overhead. */
2399 tree
2400 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2402 location_t loc = gimple_location (stmt);
2403 switch (gimple_code (stmt))
2405 case GIMPLE_ASSIGN:
2407 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2409 switch (get_gimple_rhs_class (subcode))
2411 case GIMPLE_SINGLE_RHS:
2413 tree rhs = gimple_assign_rhs1 (stmt);
2414 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2416 if (TREE_CODE (rhs) == SSA_NAME)
2418 /* If the RHS is an SSA_NAME, return its known constant value,
2419 if any. */
2420 return (*valueize) (rhs);
2422 /* Handle propagating invariant addresses into address
2423 operations. */
2424 else if (TREE_CODE (rhs) == ADDR_EXPR
2425 && !is_gimple_min_invariant (rhs))
2427 HOST_WIDE_INT offset;
2428 tree base;
2429 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2430 &offset,
2431 valueize);
2432 if (base
2433 && (CONSTANT_CLASS_P (base)
2434 || decl_address_invariant_p (base)))
2435 return build_invariant_address (TREE_TYPE (rhs),
2436 base, offset);
	      /* Fold a fully-populated vector CONSTRUCTOR into a constant
		 vector when every element valueizes to a constant.  */
2438 else if (TREE_CODE (rhs) == CONSTRUCTOR
2439 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2440 && (CONSTRUCTOR_NELTS (rhs)
2441 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2443 unsigned i;
2444 tree val, list;
2446 list = NULL_TREE;
2447 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2449 val = (*valueize) (val);
2450 if (TREE_CODE (val) == INTEGER_CST
2451 || TREE_CODE (val) == REAL_CST
2452 || TREE_CODE (val) == FIXED_CST)
2453 list = tree_cons (NULL_TREE, val, list);
2454 else
2455 return NULL_TREE;
		  /* LIST was built in reverse; restore element order.  */
2458 return build_vector (TREE_TYPE (rhs), nreverse (list));
2461 if (kind == tcc_reference)
2463 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2464 || TREE_CODE (rhs) == REALPART_EXPR
2465 || TREE_CODE (rhs) == IMAGPART_EXPR)
2466 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2468 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2469 return fold_unary_loc (EXPR_LOCATION (rhs),
2470 TREE_CODE (rhs),
2471 TREE_TYPE (rhs), val);
2473 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2474 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2476 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2477 return fold_ternary_loc (EXPR_LOCATION (rhs),
2478 TREE_CODE (rhs),
2479 TREE_TYPE (rhs), val,
2480 TREE_OPERAND (rhs, 1),
2481 TREE_OPERAND (rhs, 2));
2483 else if (TREE_CODE (rhs) == MEM_REF
2484 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2486 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2487 if (TREE_CODE (val) == ADDR_EXPR
2488 && is_gimple_min_invariant (val))
2490 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2491 unshare_expr (val),
2492 TREE_OPERAND (rhs, 1));
2493 if (tem)
2494 rhs = tem;
		  /* Fall through into the constant-aggregate lookup with the
		     (possibly rewritten) reference.  */
2497 return fold_const_aggregate_ref_1 (rhs, valueize);
2499 else if (kind == tcc_declaration)
2500 return get_symbol_constant_value (rhs);
2501 return rhs;
2504 case GIMPLE_UNARY_RHS:
2506 /* Handle unary operators that can appear in GIMPLE form.
2507 Note that we know the single operand must be a constant,
2508 so this should almost always return a simplified RHS. */
2509 tree lhs = gimple_assign_lhs (stmt);
2510 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2512 /* Conversions are useless for CCP purposes if they are
2513 value-preserving. Thus the restrictions that
2514 useless_type_conversion_p places for restrict qualification
2515 of pointer types should not apply here.
2516 Substitution later will only substitute to allowed places. */
2517 if (CONVERT_EXPR_CODE_P (subcode)
2518 && POINTER_TYPE_P (TREE_TYPE (lhs))
2519 && POINTER_TYPE_P (TREE_TYPE (op0))
2520 && (TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2521 == TYPE_ADDR_SPACE (TREE_TYPE (op0))))
2522 return op0;
2524 return
2525 fold_unary_ignore_overflow_loc (loc, subcode,
2526 gimple_expr_type (stmt), op0);
2529 case GIMPLE_BINARY_RHS:
2531 /* Handle binary operators that can appear in GIMPLE form. */
2532 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2533 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2535 /* Translate &x + CST into an invariant form suitable for
2536 further propagation. */
2537 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2538 && TREE_CODE (op0) == ADDR_EXPR
2539 && TREE_CODE (op1) == INTEGER_CST)
2541 tree off = fold_convert (ptr_type_node, op1);
2542 return build_fold_addr_expr_loc
2543 (loc,
2544 fold_build2 (MEM_REF,
2545 TREE_TYPE (TREE_TYPE (op0)),
2546 unshare_expr (op0), off));
2549 return fold_binary_loc (loc, subcode,
2550 gimple_expr_type (stmt), op0, op1);
2553 case GIMPLE_TERNARY_RHS:
2555 /* Handle ternary operators that can appear in GIMPLE form. */
2556 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2557 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2558 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2560 /* Fold embedded expressions in ternary codes. */
2561 if ((subcode == COND_EXPR
2562 || subcode == VEC_COND_EXPR)
2563 && COMPARISON_CLASS_P (op0))
2565 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2566 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2567 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2568 TREE_TYPE (op0), op00, op01);
2569 if (tem)
2570 op0 = tem;
2573 return fold_ternary_loc (loc, subcode,
2574 gimple_expr_type (stmt), op0, op1, op2);
2577 default:
2578 gcc_unreachable ();
2582 case GIMPLE_CALL:
2584 tree fn;
2586 if (gimple_call_internal_p (stmt))
2587 /* No folding yet for these functions. */
2588 return NULL_TREE;
2590 fn = (*valueize) (gimple_call_fn (stmt));
	/* Only direct calls to built-in functions are folded here.  */
2591 if (TREE_CODE (fn) == ADDR_EXPR
2592 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2593 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2595 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2596 tree call, retval;
2597 unsigned i;
2598 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2599 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2600 call = build_call_array_loc (loc,
2601 gimple_call_return_type (stmt),
2602 fn, gimple_call_num_args (stmt), args);
2603 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2604 if (retval)
2605 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2606 STRIP_NOPS (retval);
2607 return retval;
2609 return NULL_TREE;
2612 default:
2613 return NULL_TREE;
2617 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2618 Returns NULL_TREE if folding to a constant is not possible, otherwise
2619 returns a constant according to is_gimple_min_invariant. */
2621 tree
2622 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2624 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2625 if (res && is_gimple_min_invariant (res))
2626 return res;
2627 return NULL_TREE;
2631 /* The following set of functions are supposed to fold references using
2632 their constant initializers. */
2634 static tree fold_ctor_reference (tree type, tree ctor,
2635 unsigned HOST_WIDE_INT offset,
2636 unsigned HOST_WIDE_INT size);
2638 /* See if we can find constructor defining value of BASE.
2639 When we know the constructor with constant offset (such as
2640 base is array[40] and we do know constructor of array), then
2641 BIT_OFFSET is adjusted accordingly.
2643 As a special case, return error_mark_node when constructor
2644 is not explicitly available, but it is known to be zero
2645 such as 'static const int a;'. */
2646 static tree
2647 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2648 tree (*valueize)(tree))
2650 HOST_WIDE_INT bit_offset2, size, max_size;
2651 if (TREE_CODE (base) == MEM_REF)
      /* Look through a MEM_REF, accumulating its constant offset
	 (converted to bits) into *BIT_OFFSET.  */
2653 if (!integer_zerop (TREE_OPERAND (base, 1)))
2655 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2656 return NULL_TREE;
2657 *bit_offset += (mem_ref_offset (base).low
2658 * BITS_PER_UNIT);
2661 if (valueize
2662 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2663 base = valueize (TREE_OPERAND (base, 0));
2664 if (!base || TREE_CODE (base) != ADDR_EXPR)
2665 return NULL_TREE;
2666 base = TREE_OPERAND (base, 0);
2669 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2670 DECL_INITIAL. If BASE is a nested reference into another
2671 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2672 the inner reference. */
2673 switch (TREE_CODE (base))
2675 case VAR_DECL:
2676 if (!const_value_known_p (base))
2677 return NULL_TREE;
2679 /* Fallthru. */
2680 case CONST_DECL:
      /* A static or external declaration with no initializer is known
	 to be zero-initialized; signal that with error_mark_node.  */
2681 if (!DECL_INITIAL (base)
2682 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2683 return error_mark_node;
2684 return DECL_INITIAL (base);
2686 case ARRAY_REF:
2687 case COMPONENT_REF:
      /* Only handled components with a known constant extent are folded
	 through (max_size of -1 means a variable-sized access).  */
2688 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2689 if (max_size == -1 || size != max_size)
2690 return NULL_TREE;
2691 *bit_offset += bit_offset2;
2692 return get_base_constructor (base, bit_offset, valueize);
2694 case STRING_CST:
2695 case CONSTRUCTOR:
2696 return base;
2698 default:
2699 return NULL_TREE;
2703 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2704 to the memory at bit OFFSET.
2706 We do only simple job of folding byte accesses. */
2708 static tree
2709 fold_string_cst_ctor_reference (tree type, tree ctor,
2710 unsigned HOST_WIDE_INT offset,
2711 unsigned HOST_WIDE_INT size)
2713 if (INTEGRAL_TYPE_P (type)
2714 && (TYPE_MODE (type)
2715 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2716 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2717 == MODE_INT)
2718 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2719 && size == BITS_PER_UNIT
2720 && !(offset % BITS_PER_UNIT))
2722 offset /= BITS_PER_UNIT;
2723 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2724 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2725 [offset]));
2726 /* Folding
2727 const char a[20]="hello";
2728 return a[10];
2730 might lead to offset greater than string length. In this case we
2731 know value is either initialized to 0 or out of bounds. Return 0
2732 in both cases. */
2733 return build_zero_cst (type);
2735 return NULL_TREE;
2738 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2739 SIZE to the memory at bit OFFSET. */
2741 static tree
2742 fold_array_ctor_reference (tree type, tree ctor,
2743 unsigned HOST_WIDE_INT offset,
2744 unsigned HOST_WIDE_INT size)
2746 unsigned HOST_WIDE_INT cnt;
2747 tree cfield, cval;
2748 double_int low_bound, elt_size;
2749 double_int index, max_index;
2750 double_int access_index;
2751 tree domain_type = NULL_TREE;
2752 HOST_WIDE_INT inner_offset;
2754 /* Compute low bound and elt size. */
2755 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2756 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2757 if (domain_type && TYPE_MIN_VALUE (domain_type))
2759 /* Static constructors for variably sized objects makes no sense. */
2760 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2761 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2763 else
    /* Vectors and arrays without a domain are zero-based.  */
2764 low_bound = double_int_zero;
2765 /* Static constructors for variably sized objects makes no sense. */
2766 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2767 == INTEGER_CST);
2768 elt_size =
2769 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2772 /* We can handle only constantly sized accesses that are known to not
2773 be larger than size of array element. */
2774 if (!TYPE_SIZE_UNIT (type)
2775 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2776 || double_int_cmp (elt_size,
2777 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2778 return NULL_TREE;
2780 /* Compute the array index we look for. */
2781 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2782 elt_size, TRUNC_DIV_EXPR)
2783 access_index = double_int_add (access_index, low_bound);
2785 /* And offset within the access. */
2786 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2788 /* See if the array field is large enough to span whole access. We do not
2789 care to fold accesses spanning multiple array indexes. */
2790 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
2791 return NULL_TREE;
  /* Walk the constructor elements, tracking the running index.  Start
     one below the low bound so that the first implicit (NULL-index)
     element gets index LOW_BOUND.  */
2793 index = double_int_sub (low_bound, double_int_one);
2794 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2796 /* Array constructor might explicitly set index, or specify range
2797 or leave index NULL meaning that it is next index after previous
2798 one. */
2799 if (cfield)
2801 if (TREE_CODE (cfield) == INTEGER_CST)
2802 max_index = index = tree_to_double_int (cfield);
2803 else
2805 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2806 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2807 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2810 else
2811 max_index = index = double_int_add (index, double_int_one);
2813 /* Do we have match? */
2814 if (double_int_cmp (access_index, index, 1) >= 0
2815 && double_int_cmp (access_index, max_index, 1) <= 0)
2816 return fold_ctor_reference (type, cval, inner_offset, size);
2818 /* When memory is not explicitly mentioned in constructor,
2819 it is 0 (or out of range). */
2820 return build_zero_cst (type);
2823 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2824 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2826 static tree
2827 fold_nonarray_ctor_reference (tree type, tree ctor,
2828 unsigned HOST_WIDE_INT offset,
2829 unsigned HOST_WIDE_INT size)
2831 unsigned HOST_WIDE_INT cnt;
2832 tree cfield, cval;
2834 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2835 cval)
2837 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2838 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2839 tree field_size = DECL_SIZE (cfield);
2840 double_int bitoffset;
2841 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2842 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
2843 double_int bitoffset_end, access_end;
2845 /* Variable sized objects in static constructors makes no sense,
2846 but field_size can be NULL for flexible array members. */
2847 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2848 && TREE_CODE (byte_offset) == INTEGER_CST
2849 && (field_size != NULL_TREE
2850 ? TREE_CODE (field_size) == INTEGER_CST
2851 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2853 /* Compute bit offset of the field. */
2854 bitoffset = double_int_add (tree_to_double_int (field_offset),
2855 double_int_mul (byte_offset_cst,
2856 bits_per_unit_cst));
2857 /* Compute bit offset where the field ends. */
2858 if (field_size != NULL_TREE)
2859 bitoffset_end = double_int_add (bitoffset,
2860 tree_to_double_int (field_size));
2861 else
	/* A zero end marks a flexible array member, which extends to the
	   end of the object; the overlap test below special-cases it.  */
2862 bitoffset_end = double_int_zero;
2864 access_end = double_int_add (uhwi_to_double_int (offset),
2865 uhwi_to_double_int (size));
2867 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2868 [BITOFFSET, BITOFFSET_END)? */
2869 if (double_int_cmp (access_end, bitoffset, 0) > 0
2870 && (field_size == NULL_TREE
2871 || double_int_cmp (uhwi_to_double_int (offset),
2872 bitoffset_end, 0) < 0))
2874 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2875 bitoffset);
2876 /* We do have overlap. Now see if field is large enough to
2877 cover the access. Give up for accesses spanning multiple
2878 fields. */
2879 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2880 return NULL_TREE;
	  /* The access must not start before the field does.  */
2881 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
2882 return NULL_TREE;
2883 return fold_ctor_reference (type, cval,
2884 double_int_to_uhwi (inner_offset), size);
2887 /* When memory is not explicitly mentioned in constructor, it is 0. */
2888 return build_zero_cst (type);
2891 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2892 to the memory at bit OFFSET. */
2894 static tree
2895 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2896 unsigned HOST_WIDE_INT size)
2898 tree ret;
2900 /* We found the field with exact match. */
2901 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2902 && !offset)
2903 return canonicalize_constructor_val (ctor);
2905 /* We are at the end of walk, see if we can view convert the
2906 result. */
2907 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2908 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2909 && operand_equal_p (TYPE_SIZE (type),
2910 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2912 ret = canonicalize_constructor_val (ctor);
2913 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2914 if (ret)
2915 STRIP_NOPS (ret);
2916 return ret;
2918 if (TREE_CODE (ctor) == STRING_CST)
2919 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2920 if (TREE_CODE (ctor) == CONSTRUCTOR)
2923 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2924 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2925 return fold_array_ctor_reference (type, ctor, offset, size);
2926 else
2927 return fold_nonarray_ctor_reference (type, ctor, offset, size);
2930 return NULL_TREE;
2933 /* Return the tree representing the element referenced by T if T is an
2934 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
2935 names using VALUEIZE. Return NULL_TREE otherwise. */
2937 tree
2938 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2940 tree ctor, idx, base;
2941 HOST_WIDE_INT offset, size, max_size;
2942 tree tem;
  /* Never fold away a volatile access.  */
2944 if (TREE_THIS_VOLATILE (t))
2945 return NULL_TREE;
  /* Plain declarations fold directly to their constant value, if any.  */
2947 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2948 return get_symbol_constant_value (t);
2950 tem = fold_read_from_constant_string (t);
2951 if (tem)
2952 return tem;
2954 switch (TREE_CODE (t))
2956 case ARRAY_REF:
2957 case ARRAY_RANGE_REF:
2958 /* Constant indexes are handled well by get_base_constructor.
2959 Only special case variable offsets.
2960 FIXME: This code can't handle nested references with variable indexes
2961 (they will be handled only by iteration of ccp). Perhaps we can bring
2962 get_ref_base_and_extent here and make it use a valueize callback. */
2963 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
2964 && valueize
2965 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
2966 && host_integerp (idx, 0))
2968 tree low_bound, unit_size;
2970 /* If the resulting bit-offset is constant, track it. */
2971 if ((low_bound = array_ref_low_bound (t),
2972 host_integerp (low_bound, 0))
2973 && (unit_size = array_ref_element_size (t),
2974 host_integerp (unit_size, 1)))
	      /* Convert the (valueized) index into a bit offset relative
		 to the array base.  */
2976 offset = TREE_INT_CST_LOW (idx);
2977 offset -= TREE_INT_CST_LOW (low_bound);
2978 offset *= TREE_INT_CST_LOW (unit_size);
2979 offset *= BITS_PER_UNIT;
2981 base = TREE_OPERAND (t, 0);
2982 ctor = get_base_constructor (base, &offset, valueize);
2983 /* Empty constructor. Always fold to 0. */
2984 if (ctor == error_mark_node)
2985 return build_zero_cst (TREE_TYPE (t));
2986 /* Out of bound array access. Value is undefined,
2987 but don't fold. */
2988 if (offset < 0)
2989 return NULL_TREE;
2990 /* We can not determine ctor. */
2991 if (!ctor)
2992 return NULL_TREE;
2993 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
2994 TREE_INT_CST_LOW (unit_size)
2995 * BITS_PER_UNIT);
2998 /* Fallthru. */
3000 case COMPONENT_REF:
3001 case BIT_FIELD_REF:
3002 case TARGET_MEM_REF:
3003 case MEM_REF:
3004 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3005 ctor = get_base_constructor (base, &offset, valueize);
3007 /* Empty constructor. Always fold to 0. */
3008 if (ctor == error_mark_node)
3009 return build_zero_cst (TREE_TYPE (t));
3010 /* We do not know precise address. */
3011 if (max_size == -1 || max_size != size)
3012 return NULL_TREE;
3013 /* We can not determine ctor. */
3014 if (!ctor)
3015 return NULL_TREE;
3017 /* Out of bound array access. Value is undefined, but don't fold. */
3018 if (offset < 0)
3019 return NULL_TREE;
3021 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3023 case REALPART_EXPR:
3024 case IMAGPART_EXPR:
3026 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3027 if (c && TREE_CODE (c) == COMPLEX_CST)
3028 return fold_build1_loc (EXPR_LOCATION (t),
3029 TREE_CODE (t), TREE_TYPE (t), c);
3030 break;
3033 default:
3034 break;
3037 return NULL_TREE;
3040 tree
3041 fold_const_aggregate_ref (tree t)
3043 return fold_const_aggregate_ref_1 (t, NULL);
3046 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3047 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3048 KNOWN_BINFO carries the binfo describing the true type of
3049 OBJ_TYPE_REF_OBJECT(REF). */
3051 tree
3052 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3054 unsigned HOST_WIDE_INT offset, size;
3055 tree v, fn;
3057 v = BINFO_VTABLE (known_binfo);
3058 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3059 if (!v)
3060 return NULL_TREE;
3062 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3064 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3065 v = TREE_OPERAND (v, 0);
3067 else
3068 offset = 0;
3070 if (TREE_CODE (v) != ADDR_EXPR)
3071 return NULL_TREE;
3072 v = TREE_OPERAND (v, 0);
3074 if (TREE_CODE (v) != VAR_DECL
3075 || !DECL_VIRTUAL_P (v)
3076 || !DECL_INITIAL (v)
3077 || DECL_INITIAL (v) == error_mark_node)
3078 return NULL_TREE;
3079 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3080 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3081 offset += token * size;
3082 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3083 offset, size);
3084 if (!fn)
3085 return NULL_TREE;
3086 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3087 || TREE_CODE (fn) == FDESC_EXPR);
3088 fn = TREE_OPERAND (fn, 0);
3089 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3091 /* When cgraph node is missing and function is not public, we cannot
3092 devirtualize. This can happen in WHOPR when the actual method
3093 ends up in other partition, because we found devirtualization
3094 possibility too late. */
3095 if (!can_refer_decl_in_current_unit_p (fn))
3096 return NULL_TREE;
3098 return fn;
3101 /* Return true iff VAL is a gimple expression that is known to be
3102 non-negative. Restricted to floating-point inputs. */
3104 bool
3105 gimple_val_nonnegative_real_p (tree val)
3107 gimple def_stmt;
3109 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3111 /* Use existing logic for non-gimple trees. */
3112 if (tree_expr_nonnegative_p (val))
3113 return true;
3115 if (TREE_CODE (val) != SSA_NAME)
3116 return false;
3118 /* Currently we look only at the immediately defining statement
3119 to make this determination, since recursion on defining
3120 statements of operands can lead to quadratic behavior in the
3121 worst case. This is expected to catch almost all occurrences
3122 in practice. It would be possible to implement limited-depth
3123 recursion if important cases are lost. Alternatively, passes
3124 that need this information (such as the pow/powi lowering code
3125 in the cse_sincos pass) could be revised to provide it through
3126 dataflow propagation. */
3128 def_stmt = SSA_NAME_DEF_STMT (val);
3130 if (is_gimple_assign (def_stmt))
3132 tree op0, op1;
3134 /* See fold-const.c:tree_expr_nonnegative_p for additional
3135 cases that could be handled with recursion. */
3137 switch (gimple_assign_rhs_code (def_stmt))
3139 case ABS_EXPR:
3140 /* Always true for floating-point operands. */
3141 return true;
3143 case MULT_EXPR:
3144 /* True if the two operands are identical (since we are
3145 restricted to floating-point inputs). */
3146 op0 = gimple_assign_rhs1 (def_stmt);
3147 op1 = gimple_assign_rhs2 (def_stmt);
3149 if (op0 == op1
3150 || operand_equal_p (op0, op1, 0))
3151 return true;
	  /* Fall through to the default case when the operands differ.  */
3153 default:
3154 return false;
3157 else if (is_gimple_call (def_stmt))
3159 tree fndecl = gimple_call_fndecl (def_stmt);
3160 if (fndecl
3161 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3163 tree arg1;
3165 switch (DECL_FUNCTION_CODE (fndecl))
	      /* These builtins never return a negative value.  */
3167 CASE_FLT_FN (BUILT_IN_ACOS):
3168 CASE_FLT_FN (BUILT_IN_ACOSH):
3169 CASE_FLT_FN (BUILT_IN_CABS):
3170 CASE_FLT_FN (BUILT_IN_COSH):
3171 CASE_FLT_FN (BUILT_IN_ERFC):
3172 CASE_FLT_FN (BUILT_IN_EXP):
3173 CASE_FLT_FN (BUILT_IN_EXP10):
3174 CASE_FLT_FN (BUILT_IN_EXP2):
3175 CASE_FLT_FN (BUILT_IN_FABS):
3176 CASE_FLT_FN (BUILT_IN_FDIM):
3177 CASE_FLT_FN (BUILT_IN_HYPOT):
3178 CASE_FLT_FN (BUILT_IN_POW10):
3179 return true;
3181 CASE_FLT_FN (BUILT_IN_SQRT):
3182 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3183 nonnegative inputs. */
3184 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3185 return true;
3187 break;
3189 CASE_FLT_FN (BUILT_IN_POWI):
3190 /* True if the second argument is an even integer. */
3191 arg1 = gimple_call_arg (def_stmt, 1);
3193 if (TREE_CODE (arg1) == INTEGER_CST
3194 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3195 return true;
3197 break;
3199 CASE_FLT_FN (BUILT_IN_POW):
3200 /* True if the second argument is an even integer-valued
3201 real. */
3202 arg1 = gimple_call_arg (def_stmt, 1);
3204 if (TREE_CODE (arg1) == REAL_CST)
3206 REAL_VALUE_TYPE c;
3207 HOST_WIDE_INT n;
3209 c = TREE_REAL_CST (arg1);
3210 n = real_to_integer (&c);
3212 if ((n & 1) == 0)
3214 REAL_VALUE_TYPE cint;
3215 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
			  /* Accept only if the constant round-trips through
			     the integer conversion, i.e. it has an exact
			     even integer value.  */
3216 if (real_identical (&c, &cint))
3217 return true;
3221 break;
3223 default:
3224 return false;
3229 return false;