re PR c++/44524 (improve diagnostic for . vs -> typo)
[official-gcc.git] / gcc / gimple-fold.c
blob7df7e694aa2b9c980f32a01d53aa70823312a134
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
33 #include "gimple-fold.h"
35 /* Return true when DECL can be referenced from current unit.
36 We can get declarations that are not possible to reference for
37 various reasons:
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
44 set.
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsehwere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
49 declaring the body.
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final copmilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
54 directly. */
56 static bool
57 can_refer_decl_in_current_unit_p (tree decl)
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
62 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
63 return true;
64 /* External flag is set, so we deal with C++ reference
65 to static object from other file. */
66 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
67 && TREE_CODE (decl) == VAR_DECL)
69 /* Just be sure it is not big in frontend setting
70 flags incorrectly. Those variables should never
71 be finalized. */
72 gcc_checking_assert (!(vnode = varpool_get_node (decl))
73 || !vnode->finalized);
74 return false;
76 /* When function is public, we always can introduce new reference.
77 Exception are the COMDAT functions where introducing a direct
78 reference imply need to include function body in the curren tunit. */
79 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
80 return true;
81 /* We are not at ltrans stage; so don't worry about WHOPR.
82 Also when still gimplifying all referred comdat functions will be
83 produced.
84 ??? as observed in PR20991 for already optimized out comdat virtual functions
85 we may not neccesarily give up because the copy will be output elsewhere when
86 corresponding vtable is output. */
87 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
88 return true;
89 /* If we already output the function body, we are safe. */
90 if (TREE_ASM_WRITTEN (decl))
91 return true;
92 if (TREE_CODE (decl) == FUNCTION_DECL)
94 node = cgraph_get_node (decl);
95 /* Check that we still have function body and that we didn't took
96 the decision to eliminate offline copy of the function yet.
97 The second is important when devirtualization happens during final
98 compilation stage when making a new reference no longer makes callee
99 to be compiled. */
100 if (!node || !node->analyzed || node->global.inlined_to)
101 return false;
103 else if (TREE_CODE (decl) == VAR_DECL)
105 vnode = varpool_get_node (decl);
106 if (!vnode || !vnode->finalized)
107 return false;
109 return true;
112 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
113 acceptable form for is_gimple_min_invariant. */
115 tree
116 canonicalize_constructor_val (tree cval)
118 STRIP_NOPS (cval);
119 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
120 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
122 tree ptr = TREE_OPERAND (cval, 0);
123 if (is_gimple_min_invariant (ptr))
124 cval = build1_loc (EXPR_LOCATION (cval),
125 ADDR_EXPR, TREE_TYPE (ptr),
126 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
127 ptr,
128 fold_convert (ptr_type_node,
129 TREE_OPERAND (cval, 1))));
131 if (TREE_CODE (cval) == ADDR_EXPR)
133 tree base = get_base_address (TREE_OPERAND (cval, 0));
135 if (base
136 && (TREE_CODE (base) == VAR_DECL
137 || TREE_CODE (base) == FUNCTION_DECL)
138 && !can_refer_decl_in_current_unit_p (base))
139 return NULL_TREE;
140 if (cfun && base && TREE_CODE (base) == VAR_DECL)
141 add_referenced_var (base);
142 /* Fixup types in global initializers. */
143 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
144 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
146 return cval;
149 /* If SYM is a constant variable with known value, return the value.
150 NULL_TREE is returned otherwise. */
152 tree
153 get_symbol_constant_value (tree sym)
155 if (const_value_known_p (sym))
157 tree val = DECL_INITIAL (sym);
158 if (val)
160 val = canonicalize_constructor_val (val);
161 if (val && is_gimple_min_invariant (val))
162 return val;
163 else
164 return NULL_TREE;
166 /* Variables declared 'const' without an initializer
167 have zero as the initializer if they may not be
168 overridden at link or run time. */
169 if (!val
170 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
171 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
172 return build_zero_cst (TREE_TYPE (sym));
175 return NULL_TREE;
180 /* Subroutine of fold_stmt. We perform several simplifications of the
181 memory reference tree EXPR and make sure to re-gimplify them properly
182 after propagation of constant addresses. IS_LHS is true if the
183 reference is supposed to be an lvalue. */
185 static tree
186 maybe_fold_reference (tree expr, bool is_lhs)
188 tree *t = &expr;
189 tree result;
191 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
192 || TREE_CODE (expr) == REALPART_EXPR
193 || TREE_CODE (expr) == IMAGPART_EXPR)
194 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
195 return fold_unary_loc (EXPR_LOCATION (expr),
196 TREE_CODE (expr),
197 TREE_TYPE (expr),
198 TREE_OPERAND (expr, 0));
199 else if (TREE_CODE (expr) == BIT_FIELD_REF
200 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
201 return fold_ternary_loc (EXPR_LOCATION (expr),
202 TREE_CODE (expr),
203 TREE_TYPE (expr),
204 TREE_OPERAND (expr, 0),
205 TREE_OPERAND (expr, 1),
206 TREE_OPERAND (expr, 2));
208 while (handled_component_p (*t))
209 t = &TREE_OPERAND (*t, 0);
211 /* Canonicalize MEM_REFs invariant address operand. Do this first
212 to avoid feeding non-canonical MEM_REFs elsewhere. */
213 if (TREE_CODE (*t) == MEM_REF
214 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
216 bool volatile_p = TREE_THIS_VOLATILE (*t);
217 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
218 TREE_OPERAND (*t, 0),
219 TREE_OPERAND (*t, 1));
220 if (tem)
222 TREE_THIS_VOLATILE (tem) = volatile_p;
223 *t = tem;
224 tem = maybe_fold_reference (expr, is_lhs);
225 if (tem)
226 return tem;
227 return expr;
231 if (!is_lhs
232 && (result = fold_const_aggregate_ref (expr))
233 && is_gimple_min_invariant (result))
234 return result;
236 /* Fold back MEM_REFs to reference trees. */
237 if (TREE_CODE (*t) == MEM_REF
238 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
239 && integer_zerop (TREE_OPERAND (*t, 1))
240 && (TREE_THIS_VOLATILE (*t)
241 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
242 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
243 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
244 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
245 /* We have to look out here to not drop a required conversion
246 from the rhs to the lhs if is_lhs, but we don't have the
247 rhs here to verify that. Thus require strict type
248 compatibility. */
249 && types_compatible_p (TREE_TYPE (*t),
250 TREE_TYPE (TREE_OPERAND
251 (TREE_OPERAND (*t, 0), 0))))
253 tree tem;
254 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
255 tem = maybe_fold_reference (expr, is_lhs);
256 if (tem)
257 return tem;
258 return expr;
260 else if (TREE_CODE (*t) == TARGET_MEM_REF)
262 tree tem = maybe_fold_tmr (*t);
263 if (tem)
265 *t = tem;
266 tem = maybe_fold_reference (expr, is_lhs);
267 if (tem)
268 return tem;
269 return expr;
273 return NULL_TREE;
277 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
278 replacement rhs for the statement or NULL_TREE if no simplification
279 could be made. It is assumed that the operands have been previously
280 folded. */
282 static tree
283 fold_gimple_assign (gimple_stmt_iterator *si)
285 gimple stmt = gsi_stmt (*si);
286 enum tree_code subcode = gimple_assign_rhs_code (stmt);
287 location_t loc = gimple_location (stmt);
289 tree result = NULL_TREE;
291 switch (get_gimple_rhs_class (subcode))
293 case GIMPLE_SINGLE_RHS:
295 tree rhs = gimple_assign_rhs1 (stmt);
297 if (REFERENCE_CLASS_P (rhs))
298 return maybe_fold_reference (rhs, false);
300 else if (TREE_CODE (rhs) == ADDR_EXPR)
302 tree ref = TREE_OPERAND (rhs, 0);
303 tree tem = maybe_fold_reference (ref, true);
304 if (tem
305 && TREE_CODE (tem) == MEM_REF
306 && integer_zerop (TREE_OPERAND (tem, 1)))
307 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
308 else if (tem)
309 result = fold_convert (TREE_TYPE (rhs),
310 build_fold_addr_expr_loc (loc, tem));
311 else if (TREE_CODE (ref) == MEM_REF
312 && integer_zerop (TREE_OPERAND (ref, 1)))
313 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
316 else if (TREE_CODE (rhs) == CONSTRUCTOR
317 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
318 && (CONSTRUCTOR_NELTS (rhs)
319 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
321 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
322 unsigned i;
323 tree val;
325 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
326 if (TREE_CODE (val) != INTEGER_CST
327 && TREE_CODE (val) != REAL_CST
328 && TREE_CODE (val) != FIXED_CST)
329 return NULL_TREE;
331 return build_vector_from_ctor (TREE_TYPE (rhs),
332 CONSTRUCTOR_ELTS (rhs));
335 else if (DECL_P (rhs))
336 return unshare_expr (get_symbol_constant_value (rhs));
338 /* If we couldn't fold the RHS, hand over to the generic
339 fold routines. */
340 if (result == NULL_TREE)
341 result = fold (rhs);
343 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
344 that may have been added by fold, and "useless" type
345 conversions that might now be apparent due to propagation. */
346 STRIP_USELESS_TYPE_CONVERSION (result);
348 if (result != rhs && valid_gimple_rhs_p (result))
349 return result;
351 return NULL_TREE;
353 break;
355 case GIMPLE_UNARY_RHS:
357 tree rhs = gimple_assign_rhs1 (stmt);
359 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
360 if (result)
362 /* If the operation was a conversion do _not_ mark a
363 resulting constant with TREE_OVERFLOW if the original
364 constant was not. These conversions have implementation
365 defined behavior and retaining the TREE_OVERFLOW flag
366 here would confuse later passes such as VRP. */
367 if (CONVERT_EXPR_CODE_P (subcode)
368 && TREE_CODE (result) == INTEGER_CST
369 && TREE_CODE (rhs) == INTEGER_CST)
370 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
372 STRIP_USELESS_TYPE_CONVERSION (result);
373 if (valid_gimple_rhs_p (result))
374 return result;
377 break;
379 case GIMPLE_BINARY_RHS:
380 /* Try to canonicalize for boolean-typed X the comparisons
381 X == 0, X == 1, X != 0, and X != 1. */
382 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
383 || gimple_assign_rhs_code (stmt) == NE_EXPR)
385 tree lhs = gimple_assign_lhs (stmt);
386 tree op1 = gimple_assign_rhs1 (stmt);
387 tree op2 = gimple_assign_rhs2 (stmt);
388 tree type = TREE_TYPE (op1);
390 /* Check whether the comparison operands are of the same boolean
391 type as the result type is.
392 Check that second operand is an integer-constant with value
393 one or zero. */
394 if (TREE_CODE (op2) == INTEGER_CST
395 && (integer_zerop (op2) || integer_onep (op2))
396 && useless_type_conversion_p (TREE_TYPE (lhs), type))
398 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
399 bool is_logical_not = false;
401 /* X == 0 and X != 1 is a logical-not.of X
402 X == 1 and X != 0 is X */
403 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
404 || (cmp_code == NE_EXPR && integer_onep (op2)))
405 is_logical_not = true;
407 if (is_logical_not == false)
408 result = op1;
409 /* Only for one-bit precision typed X the transformation
410 !X -> ~X is valied. */
411 else if (TYPE_PRECISION (type) == 1)
412 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
413 type, op1);
414 /* Otherwise we use !X -> X ^ 1. */
415 else
416 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
417 type, op1, build_int_cst (type, 1));
422 if (!result)
423 result = fold_binary_loc (loc, subcode,
424 TREE_TYPE (gimple_assign_lhs (stmt)),
425 gimple_assign_rhs1 (stmt),
426 gimple_assign_rhs2 (stmt));
428 if (result)
430 STRIP_USELESS_TYPE_CONVERSION (result);
431 if (valid_gimple_rhs_p (result))
432 return result;
434 break;
436 case GIMPLE_TERNARY_RHS:
437 /* Try to fold a conditional expression. */
438 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
440 tree op0 = gimple_assign_rhs1 (stmt);
441 tree tem;
442 bool set = false;
443 location_t cond_loc = gimple_location (stmt);
445 if (COMPARISON_CLASS_P (op0))
447 fold_defer_overflow_warnings ();
448 tem = fold_binary_loc (cond_loc,
449 TREE_CODE (op0), TREE_TYPE (op0),
450 TREE_OPERAND (op0, 0),
451 TREE_OPERAND (op0, 1));
452 /* This is actually a conditional expression, not a GIMPLE
453 conditional statement, however, the valid_gimple_rhs_p
454 test still applies. */
455 set = (tem && is_gimple_condexpr (tem)
456 && valid_gimple_rhs_p (tem));
457 fold_undefer_overflow_warnings (set, stmt, 0);
459 else if (is_gimple_min_invariant (op0))
461 tem = op0;
462 set = true;
464 else
465 return NULL_TREE;
467 if (set)
468 result = fold_build3_loc (cond_loc, COND_EXPR,
469 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
470 gimple_assign_rhs2 (stmt),
471 gimple_assign_rhs3 (stmt));
474 if (!result)
475 result = fold_ternary_loc (loc, subcode,
476 TREE_TYPE (gimple_assign_lhs (stmt)),
477 gimple_assign_rhs1 (stmt),
478 gimple_assign_rhs2 (stmt),
479 gimple_assign_rhs3 (stmt));
481 if (result)
483 STRIP_USELESS_TYPE_CONVERSION (result);
484 if (valid_gimple_rhs_p (result))
485 return result;
487 break;
489 case GIMPLE_INVALID_RHS:
490 gcc_unreachable ();
493 return NULL_TREE;
496 /* Attempt to fold a conditional statement. Return true if any changes were
497 made. We only attempt to fold the condition expression, and do not perform
498 any transformation that would require alteration of the cfg. It is
499 assumed that the operands have been previously folded. */
501 static bool
502 fold_gimple_cond (gimple stmt)
504 tree result = fold_binary_loc (gimple_location (stmt),
505 gimple_cond_code (stmt),
506 boolean_type_node,
507 gimple_cond_lhs (stmt),
508 gimple_cond_rhs (stmt));
510 if (result)
512 STRIP_USELESS_TYPE_CONVERSION (result);
513 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
515 gimple_cond_set_condition_from_tree (stmt, result);
516 return true;
520 return false;
523 /* Convert EXPR into a GIMPLE value suitable for substitution on the
524 RHS of an assignment. Insert the necessary statements before
525 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
526 is replaced. If the call is expected to produces a result, then it
527 is replaced by an assignment of the new RHS to the result variable.
528 If the result is to be ignored, then the call is replaced by a
529 GIMPLE_NOP. A proper VDEF chain is retained by making the first
530 VUSE and the last VDEF of the whole sequence be the same as the replaced
531 statement and using new SSA names for stores in between. */
533 void
534 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
536 tree lhs;
537 tree tmp = NULL_TREE; /* Silence warning. */
538 gimple stmt, new_stmt;
539 gimple_stmt_iterator i;
540 gimple_seq stmts = gimple_seq_alloc();
541 struct gimplify_ctx gctx;
542 gimple last = NULL;
543 gimple laststore = NULL;
544 tree reaching_vuse;
546 stmt = gsi_stmt (*si_p);
548 gcc_assert (is_gimple_call (stmt));
550 lhs = gimple_call_lhs (stmt);
551 reaching_vuse = gimple_vuse (stmt);
553 push_gimplify_context (&gctx);
554 gctx.into_ssa = gimple_in_ssa_p (cfun);
556 if (lhs == NULL_TREE)
558 gimplify_and_add (expr, &stmts);
559 /* We can end up with folding a memcpy of an empty class assignment
560 which gets optimized away by C++ gimplification. */
561 if (gimple_seq_empty_p (stmts))
563 pop_gimplify_context (NULL);
564 if (gimple_in_ssa_p (cfun))
566 unlink_stmt_vdef (stmt);
567 release_defs (stmt);
569 gsi_remove (si_p, true);
570 return;
573 else
574 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
576 pop_gimplify_context (NULL);
578 if (gimple_has_location (stmt))
579 annotate_all_with_location (stmts, gimple_location (stmt));
581 /* The replacement can expose previously unreferenced variables. */
582 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
584 if (last)
586 gsi_insert_before (si_p, last, GSI_NEW_STMT);
587 gsi_next (si_p);
589 new_stmt = gsi_stmt (i);
590 if (gimple_in_ssa_p (cfun))
591 find_new_referenced_vars (new_stmt);
592 /* If the new statement possibly has a VUSE, update it with exact SSA
593 name we know will reach this one. */
594 if (gimple_has_mem_ops (new_stmt))
596 /* If we've also seen a previous store create a new VDEF for
597 the latter one, and make that the new reaching VUSE. */
598 if (laststore)
600 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
601 gimple_set_vdef (laststore, reaching_vuse);
602 update_stmt (laststore);
603 laststore = NULL;
605 gimple_set_vuse (new_stmt, reaching_vuse);
606 gimple_set_modified (new_stmt, true);
608 if (gimple_assign_single_p (new_stmt)
609 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
611 laststore = new_stmt;
613 last = new_stmt;
616 if (lhs == NULL_TREE)
618 /* If we replace a call without LHS that has a VDEF and our new
619 sequence ends with a store we must make that store have the same
620 vdef in order not to break the sequencing. This can happen
621 for instance when folding memcpy calls into assignments. */
622 if (gimple_vdef (stmt) && laststore)
624 gimple_set_vdef (laststore, gimple_vdef (stmt));
625 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
626 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
627 update_stmt (laststore);
629 else if (gimple_in_ssa_p (cfun))
631 unlink_stmt_vdef (stmt);
632 release_defs (stmt);
634 new_stmt = last;
636 else
638 if (last)
640 gsi_insert_before (si_p, last, GSI_NEW_STMT);
641 gsi_next (si_p);
643 if (laststore && is_gimple_reg (lhs))
645 gimple_set_vdef (laststore, gimple_vdef (stmt));
646 update_stmt (laststore);
647 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
648 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
649 laststore = NULL;
651 else if (laststore)
653 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
654 gimple_set_vdef (laststore, reaching_vuse);
655 update_stmt (laststore);
656 laststore = NULL;
658 new_stmt = gimple_build_assign (lhs, tmp);
659 if (!is_gimple_reg (tmp))
660 gimple_set_vuse (new_stmt, reaching_vuse);
661 if (!is_gimple_reg (lhs))
663 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
664 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
665 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
667 else if (reaching_vuse == gimple_vuse (stmt))
668 unlink_stmt_vdef (stmt);
671 gimple_set_location (new_stmt, gimple_location (stmt));
672 gsi_replace (si_p, new_stmt, false);
675 /* Return the string length, maximum string length or maximum value of
676 ARG in LENGTH.
677 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
678 is not NULL and, for TYPE == 0, its value is not equal to the length
679 we determine or if we are unable to determine the length or value,
680 return false. VISITED is a bitmap of visited variables.
681 TYPE is 0 if string length should be returned, 1 for maximum string
682 length and 2 for maximum value ARG can have. */
684 static bool
685 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
687 tree var, val;
688 gimple def_stmt;
690 if (TREE_CODE (arg) != SSA_NAME)
692 if (TREE_CODE (arg) == COND_EXPR)
693 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
694 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
695 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
696 else if (TREE_CODE (arg) == ADDR_EXPR
697 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
698 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
700 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
701 if (TREE_CODE (aop0) == INDIRECT_REF
702 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
703 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
704 length, visited, type);
707 if (type == 2)
709 val = arg;
710 if (TREE_CODE (val) != INTEGER_CST
711 || tree_int_cst_sgn (val) < 0)
712 return false;
714 else
715 val = c_strlen (arg, 1);
716 if (!val)
717 return false;
719 if (*length)
721 if (type > 0)
723 if (TREE_CODE (*length) != INTEGER_CST
724 || TREE_CODE (val) != INTEGER_CST)
725 return false;
727 if (tree_int_cst_lt (*length, val))
728 *length = val;
729 return true;
731 else if (simple_cst_equal (val, *length) != 1)
732 return false;
735 *length = val;
736 return true;
739 /* If we were already here, break the infinite cycle. */
740 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
741 return true;
743 var = arg;
744 def_stmt = SSA_NAME_DEF_STMT (var);
746 switch (gimple_code (def_stmt))
748 case GIMPLE_ASSIGN:
749 /* The RHS of the statement defining VAR must either have a
750 constant length or come from another SSA_NAME with a constant
751 length. */
752 if (gimple_assign_single_p (def_stmt)
753 || gimple_assign_unary_nop_p (def_stmt))
755 tree rhs = gimple_assign_rhs1 (def_stmt);
756 return get_maxval_strlen (rhs, length, visited, type);
758 return false;
760 case GIMPLE_PHI:
762 /* All the arguments of the PHI node must have the same constant
763 length. */
764 unsigned i;
766 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
768 tree arg = gimple_phi_arg (def_stmt, i)->def;
770 /* If this PHI has itself as an argument, we cannot
771 determine the string length of this argument. However,
772 if we can find a constant string length for the other
773 PHI args then we can still be sure that this is a
774 constant string length. So be optimistic and just
775 continue with the next argument. */
776 if (arg == gimple_phi_result (def_stmt))
777 continue;
779 if (!get_maxval_strlen (arg, length, visited, type))
780 return false;
783 return true;
785 default:
786 return false;
791 /* Fold builtin call in statement STMT. Returns a simplified tree.
792 We may return a non-constant expression, including another call
793 to a different function and with different arguments, e.g.,
794 substituting memcpy for strcpy when the string length is known.
795 Note that some builtins expand into inline code that may not
796 be valid in GIMPLE. Callers must take care. */
798 tree
799 gimple_fold_builtin (gimple stmt)
801 tree result, val[3];
802 tree callee, a;
803 int arg_idx, type;
804 bitmap visited;
805 bool ignore;
806 int nargs;
807 location_t loc = gimple_location (stmt);
809 gcc_assert (is_gimple_call (stmt));
811 ignore = (gimple_call_lhs (stmt) == NULL);
813 /* First try the generic builtin folder. If that succeeds, return the
814 result directly. */
815 result = fold_call_stmt (stmt, ignore);
816 if (result)
818 if (ignore)
819 STRIP_NOPS (result);
820 return result;
823 /* Ignore MD builtins. */
824 callee = gimple_call_fndecl (stmt);
825 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
826 return NULL_TREE;
828 /* Give up for always_inline inline builtins until they are
829 inlined. */
830 if (avoid_folding_inline_builtin (callee))
831 return NULL_TREE;
833 /* If the builtin could not be folded, and it has no argument list,
834 we're done. */
835 nargs = gimple_call_num_args (stmt);
836 if (nargs == 0)
837 return NULL_TREE;
839 /* Limit the work only for builtins we know how to simplify. */
840 switch (DECL_FUNCTION_CODE (callee))
842 case BUILT_IN_STRLEN:
843 case BUILT_IN_FPUTS:
844 case BUILT_IN_FPUTS_UNLOCKED:
845 arg_idx = 0;
846 type = 0;
847 break;
848 case BUILT_IN_STRCPY:
849 case BUILT_IN_STRNCPY:
850 arg_idx = 1;
851 type = 0;
852 break;
853 case BUILT_IN_MEMCPY_CHK:
854 case BUILT_IN_MEMPCPY_CHK:
855 case BUILT_IN_MEMMOVE_CHK:
856 case BUILT_IN_MEMSET_CHK:
857 case BUILT_IN_STRNCPY_CHK:
858 arg_idx = 2;
859 type = 2;
860 break;
861 case BUILT_IN_STRCPY_CHK:
862 case BUILT_IN_STPCPY_CHK:
863 arg_idx = 1;
864 type = 1;
865 break;
866 case BUILT_IN_SNPRINTF_CHK:
867 case BUILT_IN_VSNPRINTF_CHK:
868 arg_idx = 1;
869 type = 2;
870 break;
871 default:
872 return NULL_TREE;
875 if (arg_idx >= nargs)
876 return NULL_TREE;
878 /* Try to use the dataflow information gathered by the CCP process. */
879 visited = BITMAP_ALLOC (NULL);
880 bitmap_clear (visited);
882 memset (val, 0, sizeof (val));
883 a = gimple_call_arg (stmt, arg_idx);
884 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
885 val[arg_idx] = NULL_TREE;
887 BITMAP_FREE (visited);
889 result = NULL_TREE;
890 switch (DECL_FUNCTION_CODE (callee))
892 case BUILT_IN_STRLEN:
893 if (val[0] && nargs == 1)
895 tree new_val =
896 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
898 /* If the result is not a valid gimple value, or not a cast
899 of a valid gimple value, then we cannot use the result. */
900 if (is_gimple_val (new_val)
901 || (CONVERT_EXPR_P (new_val)
902 && is_gimple_val (TREE_OPERAND (new_val, 0))))
903 return new_val;
905 break;
907 case BUILT_IN_STRCPY:
908 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
909 result = fold_builtin_strcpy (loc, callee,
910 gimple_call_arg (stmt, 0),
911 gimple_call_arg (stmt, 1),
912 val[1]);
913 break;
915 case BUILT_IN_STRNCPY:
916 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
917 result = fold_builtin_strncpy (loc, callee,
918 gimple_call_arg (stmt, 0),
919 gimple_call_arg (stmt, 1),
920 gimple_call_arg (stmt, 2),
921 val[1]);
922 break;
924 case BUILT_IN_FPUTS:
925 if (nargs == 2)
926 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
927 gimple_call_arg (stmt, 1),
928 ignore, false, val[0]);
929 break;
931 case BUILT_IN_FPUTS_UNLOCKED:
932 if (nargs == 2)
933 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
934 gimple_call_arg (stmt, 1),
935 ignore, true, val[0]);
936 break;
938 case BUILT_IN_MEMCPY_CHK:
939 case BUILT_IN_MEMPCPY_CHK:
940 case BUILT_IN_MEMMOVE_CHK:
941 case BUILT_IN_MEMSET_CHK:
942 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
943 result = fold_builtin_memory_chk (loc, callee,
944 gimple_call_arg (stmt, 0),
945 gimple_call_arg (stmt, 1),
946 gimple_call_arg (stmt, 2),
947 gimple_call_arg (stmt, 3),
948 val[2], ignore,
949 DECL_FUNCTION_CODE (callee));
950 break;
952 case BUILT_IN_STRCPY_CHK:
953 case BUILT_IN_STPCPY_CHK:
954 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
955 result = fold_builtin_stxcpy_chk (loc, callee,
956 gimple_call_arg (stmt, 0),
957 gimple_call_arg (stmt, 1),
958 gimple_call_arg (stmt, 2),
959 val[1], ignore,
960 DECL_FUNCTION_CODE (callee));
961 break;
963 case BUILT_IN_STRNCPY_CHK:
964 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
965 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
966 gimple_call_arg (stmt, 1),
967 gimple_call_arg (stmt, 2),
968 gimple_call_arg (stmt, 3),
969 val[2]);
970 break;
972 case BUILT_IN_SNPRINTF_CHK:
973 case BUILT_IN_VSNPRINTF_CHK:
974 if (val[1] && is_gimple_val (val[1]))
975 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
976 DECL_FUNCTION_CODE (callee));
977 break;
979 default:
980 gcc_unreachable ();
983 if (result && ignore)
984 result = fold_ignored_result (result);
985 return result;
988 /* Generate code adjusting the first parameter of a call statement determined
989 by GSI by DELTA. */
991 void
992 gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
994 gimple call_stmt = gsi_stmt (*gsi);
995 tree parm, tmp;
996 gimple new_stmt;
998 delta = convert_to_ptrofftype (delta);
999 gcc_assert (gimple_call_num_args (call_stmt) >= 1);
1000 parm = gimple_call_arg (call_stmt, 0);
1001 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
1002 tmp = create_tmp_var (TREE_TYPE (parm), NULL);
1003 add_referenced_var (tmp);
1005 tmp = make_ssa_name (tmp, NULL);
1006 new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
1007 SSA_NAME_DEF_STMT (tmp) = new_stmt;
1008 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1009 gimple_call_set_arg (call_stmt, 0, tmp);
1012 /* Return a binfo to be used for devirtualization of calls based on an object
1013 represented by a declaration (i.e. a global or automatically allocated one)
1014 or NULL if it cannot be found or is not safe. CST is expected to be an
1015 ADDR_EXPR of such object or the function will return NULL. Currently it is
1016 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
1018 tree
1019 gimple_extract_devirt_binfo_from_cst (tree cst)
1021 HOST_WIDE_INT offset, size, max_size;
1022 tree base, type, expected_type, binfo;
1023 bool last_artificial = false;
1025 if (!flag_devirtualize
1026 || TREE_CODE (cst) != ADDR_EXPR
1027 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1028 return NULL_TREE;
1030 cst = TREE_OPERAND (cst, 0);
1031 expected_type = TREE_TYPE (cst);
1032 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1033 type = TREE_TYPE (base);
1034 if (!DECL_P (base)
1035 || max_size == -1
1036 || max_size != size
1037 || TREE_CODE (type) != RECORD_TYPE)
1038 return NULL_TREE;
1040 /* Find the sub-object the constant actually refers to and mark whether it is
1041 an artificial one (as opposed to a user-defined one). */
1042 while (true)
1044 HOST_WIDE_INT pos, size;
1045 tree fld;
1047 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
1048 break;
1049 if (offset < 0)
1050 return NULL_TREE;
1052 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1054 if (TREE_CODE (fld) != FIELD_DECL)
1055 continue;
1057 pos = int_bit_position (fld);
1058 size = tree_low_cst (DECL_SIZE (fld), 1);
1059 if (pos <= offset && (pos + size) > offset)
1060 break;
1062 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1063 return NULL_TREE;
1065 last_artificial = DECL_ARTIFICIAL (fld);
1066 type = TREE_TYPE (fld);
1067 offset -= pos;
1069 /* Artifical sub-objects are ancestors, we do not want to use them for
1070 devirtualization, at least not here. */
1071 if (last_artificial)
1072 return NULL_TREE;
1073 binfo = TYPE_BINFO (type);
1074 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1075 return NULL_TREE;
1076 else
1077 return binfo;
1080 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1081 The statement may be replaced by another statement, e.g., if the call
1082 simplifies to a constant value. Return true if any changes were made.
1083 It is assumed that the operands have been previously folded. */
1085 bool
1086 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1088 gimple stmt = gsi_stmt (*gsi);
1089 tree callee;
1091 /* Check for builtins that CCP can handle using information not
1092 available in the generic fold routines. */
/* Builtin folding may replace the call with a different statement, which
   is not allowed when INPLACE is set, hence the !inplace guard.  */
1093 callee = gimple_call_fndecl (stmt);
1094 if (!inplace && callee && DECL_BUILT_IN (callee))
1096 tree result = gimple_fold_builtin (stmt);
1098 if (result)
/* Prefer the cheap in-place replacement; fall back to regimplifying
   the folded tree if it is not a valid GIMPLE rhs.  */
1100 if (!update_call_from_tree (gsi, result))
1101 gimplify_and_update_call_from_tree (gsi, result);
1102 return true;
1106 /* Check for virtual calls that became direct calls. */
1107 callee = gimple_call_fn (stmt);
1108 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1110 tree binfo, fndecl, obj;
1111 HOST_WIDE_INT token;
/* If the OBJ_TYPE_REF expression already resolves to a function
   address, simply strip the OBJ_TYPE_REF wrapper.  */
1113 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1115 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1116 return true;
/* Otherwise try devirtualizing through the BINFO derived from the
   constant object the call is invoked on.  */
1119 obj = OBJ_TYPE_REF_OBJECT (callee);
1120 binfo = gimple_extract_devirt_binfo_from_cst (obj);
1121 if (!binfo)
1122 return false;
1123 token = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1124 fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1125 if (!fndecl)
1126 return false;
1127 gimple_call_set_fndecl (stmt, fndecl);
1128 return true;
1131 return false;
1134 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1135 distinguishes both cases. */
1137 static bool
1138 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1140 bool changed = false;
1141 gimple stmt = gsi_stmt (*gsi);
1142 unsigned i;
1143 gimple_stmt_iterator gsinext = *gsi;
1144 gimple next_stmt;
/* Remember the statement following *GSI so that, after folding, we can
   detect whether *GSI's statement was removed entirely (in which case
   *GSI now points at NEXT_STMT).  */
1146 gsi_next (&gsinext);
1147 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1149 /* Fold the main computation performed by the statement. */
1150 switch (gimple_code (stmt))
1152 case GIMPLE_ASSIGN:
1154 unsigned old_num_ops = gimple_num_ops (stmt);
1155 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1156 tree lhs = gimple_assign_lhs (stmt);
1157 tree new_rhs;
1158 /* First canonicalize operand order. This avoids building new
1159 trees if this is the only thing fold would later do. */
1160 if ((commutative_tree_code (subcode)
1161 || commutative_ternary_tree_code (subcode))
1162 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1163 gimple_assign_rhs2 (stmt), false))
1165 tree tem = gimple_assign_rhs1 (stmt);
1166 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1167 gimple_assign_set_rhs2 (stmt, tem);
1168 changed = true;
1170 new_rhs = fold_gimple_assign (gsi);
1171 if (new_rhs
1172 && !useless_type_conversion_p (TREE_TYPE (lhs),
1173 TREE_TYPE (new_rhs)))
1174 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
/* In-place folding may only shrink the statement, never grow its
   operand count.  */
1175 if (new_rhs
1176 && (!inplace
1177 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1179 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1180 changed = true;
1182 break;
1185 case GIMPLE_COND:
1186 changed |= fold_gimple_cond (stmt);
1187 break;
1189 case GIMPLE_CALL:
1190 /* Fold *& in call arguments. */
1191 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1192 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1194 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1195 if (tmp)
1197 gimple_call_set_arg (stmt, i, tmp);
1198 changed = true;
1201 changed |= gimple_fold_call (gsi, inplace);
1202 break;
1204 case GIMPLE_ASM:
1205 /* Fold *& in asm operands. */
1207 size_t noutputs;
1208 const char **oconstraints;
1209 const char *constraint;
1210 bool allows_mem, allows_reg;
1212 noutputs = gimple_asm_noutputs (stmt);
1213 oconstraints = XALLOCAVEC (const char *, noutputs);
/* Output operands are folded as lvalues; their constraints are
   collected so the input constraints below can be parsed against
   them.  */
1215 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1217 tree link = gimple_asm_output_op (stmt, i);
1218 tree op = TREE_VALUE (link);
1219 oconstraints[i]
1220 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1221 if (REFERENCE_CLASS_P (op)
1222 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1224 TREE_VALUE (link) = op;
1225 changed = true;
1228 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1230 tree link = gimple_asm_input_op (stmt, i);
1231 tree op = TREE_VALUE (link);
1232 constraint
1233 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1234 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1235 oconstraints, &allows_mem, &allows_reg);
/* An input that must live in memory is folded as an lvalue.  */
1236 if (REFERENCE_CLASS_P (op)
1237 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1238 != NULL_TREE)
1240 TREE_VALUE (link) = op;
1241 changed = true;
1245 break;
1247 case GIMPLE_DEBUG:
1248 if (gimple_debug_bind_p (stmt))
1250 tree val = gimple_debug_bind_get_value (stmt);
1251 if (val
1252 && REFERENCE_CLASS_P (val))
1254 tree tem = maybe_fold_reference (val, false);
1255 if (tem)
1257 gimple_debug_bind_set_value (stmt, tem);
1258 changed = true;
1262 break;
1264 default:;
1267 /* If stmt folds into nothing and it was the last stmt in a bb,
1268 don't call gsi_stmt. */
1269 if (gsi_end_p (*gsi))
1271 gcc_assert (next_stmt == NULL);
1272 return changed;
/* Re-read: the folding above may have replaced the statement.  */
1275 stmt = gsi_stmt (*gsi);
1277 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1278 as we'd be changing the next stmt. */
1279 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1281 tree lhs = gimple_get_lhs (stmt);
1282 if (lhs && REFERENCE_CLASS_P (lhs))
1284 tree new_lhs = maybe_fold_reference (lhs, true);
1285 if (new_lhs)
1287 gimple_set_lhs (stmt, new_lhs);
1288 changed = true;
1293 return changed;
1296 /* Fold the statement pointed to by GSI. In some cases, this function may
1297 replace the whole statement with a new one. Returns true iff folding
1298 makes any changes.
1299 The statement pointed to by GSI should be in valid gimple form but may
1300 be in unfolded state as resulting from for example constant propagation
1301 which can produce *&x = 0. */
1303 bool
1304 fold_stmt (gimple_stmt_iterator *gsi)
1306 return fold_stmt_1 (gsi, false);
1309 /* Perform the minimal folding on statement *GSI. Only operations like
1310 *&x created by constant propagation are handled. The statement cannot
1311 be replaced with a new one. Return true if the statement was
1312 changed, false otherwise.
1313 The statement *GSI should be in valid gimple form but may
1314 be in unfolded state as resulting from for example constant propagation
1315 which can produce *&x = 0. */
1317 bool
1318 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1320 gimple stmt = gsi_stmt (*gsi);
1321 bool changed = fold_stmt_1 (gsi, true);
1322 gcc_assert (gsi_stmt (*gsi) == stmt);
1323 return changed;
1326 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1327 if EXPR is null or we don't know how.
1328 If non-null, the result always has boolean type. */
1330 static tree
1331 canonicalize_bool (tree expr, bool invert)
1333 if (!expr)
1334 return NULL_TREE;
1335 else if (invert)
1337 if (integer_nonzerop (expr))
1338 return boolean_false_node;
1339 else if (integer_zerop (expr))
1340 return boolean_true_node;
1341 else if (TREE_CODE (expr) == SSA_NAME)
1342 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1343 build_int_cst (TREE_TYPE (expr), 0));
1344 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1345 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1346 boolean_type_node,
1347 TREE_OPERAND (expr, 0),
1348 TREE_OPERAND (expr, 1));
1349 else
1350 return NULL_TREE;
1352 else
1354 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1355 return expr;
1356 if (integer_nonzerop (expr))
1357 return boolean_true_node;
1358 else if (integer_zerop (expr))
1359 return boolean_false_node;
1360 else if (TREE_CODE (expr) == SSA_NAME)
1361 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1362 build_int_cst (TREE_TYPE (expr), 0));
1363 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1364 return fold_build2 (TREE_CODE (expr),
1365 boolean_type_node,
1366 TREE_OPERAND (expr, 0),
1367 TREE_OPERAND (expr, 1));
1368 else
1369 return NULL_TREE;
1373 /* Check to see if a boolean expression EXPR is logically equivalent to the
1374 comparison (OP1 CODE OP2). Check for various identities involving
1375 SSA_NAMEs. */
1377 static bool
1378 same_bool_comparison_p (const_tree expr, enum tree_code code,
1379 const_tree op1, const_tree op2)
1381 gimple s;
1383 /* The obvious case. */
1384 if (TREE_CODE (expr) == code
1385 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1386 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1387 return true;
1389 /* Check for comparing (name, name != 0) and the case where expr
1390 is an SSA_NAME with a definition matching the comparison. */
1391 if (TREE_CODE (expr) == SSA_NAME
1392 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1394 if (operand_equal_p (expr, op1, 0))
1395 return ((code == NE_EXPR && integer_zerop (op2))
1396 || (code == EQ_EXPR && integer_nonzerop (op2)));
/* Otherwise look through EXPR's defining statement.  */
1397 s = SSA_NAME_DEF_STMT (expr);
1398 if (is_gimple_assign (s)
1399 && gimple_assign_rhs_code (s) == code
1400 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1401 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1402 return true;
1405 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1406 of name is a comparison, recurse. */
1407 if (TREE_CODE (op1) == SSA_NAME
1408 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1410 s = SSA_NAME_DEF_STMT (op1);
1411 if (is_gimple_assign (s)
1412 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1414 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) / (name == 1) keeps the comparison's sense ...  */
1415 if ((c == NE_EXPR && integer_zerop (op2))
1416 || (c == EQ_EXPR && integer_nonzerop (op2)))
1417 return same_bool_comparison_p (expr, c,
1418 gimple_assign_rhs1 (s),
1419 gimple_assign_rhs2 (s));
/* ... while (name == 0) / (name != 1) inverts it.  */
1420 if ((c == EQ_EXPR && integer_zerop (op2))
1421 || (c == NE_EXPR && integer_nonzerop (op2)))
1422 return same_bool_comparison_p (expr,
1423 invert_tree_comparison (c, false),
1424 gimple_assign_rhs1 (s),
1425 gimple_assign_rhs2 (s));
1428 return false;
1431 /* Check to see if two boolean expressions OP1 and OP2 are logically
1432 equivalent. */
1434 static bool
1435 same_bool_result_p (const_tree op1, const_tree op2)
1437 /* Simple cases first. */
1438 if (operand_equal_p (op1, op2, 0))
1439 return true;
1441 /* Check the cases where at least one of the operands is a comparison.
1442 These are a bit smarter than operand_equal_p in that they apply some
1443 identifies on SSA_NAMEs. */
1444 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1445 && same_bool_comparison_p (op1, TREE_CODE (op2),
1446 TREE_OPERAND (op2, 0),
1447 TREE_OPERAND (op2, 1)))
1448 return true;
1449 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1450 && same_bool_comparison_p (op2, TREE_CODE (op1),
1451 TREE_OPERAND (op1, 0),
1452 TREE_OPERAND (op1, 1)))
1453 return true;
1455 /* Default case. */
1456 return false;
1459 /* Forward declarations for some mutually recursive functions. */
1461 static tree
1462 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1463 enum tree_code code2, tree op2a, tree op2b);
1464 static tree
1465 and_var_with_comparison (tree var, bool invert,
1466 enum tree_code code2, tree op2a, tree op2b);
1467 static tree
1468 and_var_with_comparison_1 (gimple stmt,
1469 enum tree_code code2, tree op2a, tree op2b);
1470 static tree
1471 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1472 enum tree_code code2, tree op2a, tree op2b);
1473 static tree
1474 or_var_with_comparison (tree var, bool invert,
1475 enum tree_code code2, tree op2a, tree op2b);
1476 static tree
1477 or_var_with_comparison_1 (gimple stmt,
1478 enum tree_code code2, tree op2a, tree op2b);
1480 /* Helper function for and_comparisons_1: try to simplify the AND of the
1481 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1482 If INVERT is true, invert the value of the VAR before doing the AND.
1483 Return NULL_EXPR if we can't simplify this to a single expression. */
1485 static tree
1486 and_var_with_comparison (tree var, bool invert,
1487 enum tree_code code2, tree op2a, tree op2b)
1489 tree t;
1490 gimple stmt = SSA_NAME_DEF_STMT (var);
1492 /* We can only deal with variables whose definitions are assignments. */
1493 if (!is_gimple_assign (stmt))
1494 return NULL_TREE;
1496 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1497 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1498 Then we only have to consider the simpler non-inverted cases. */
1499 if (invert)
1500 t = or_var_with_comparison_1 (stmt,
1501 invert_tree_comparison (code2, false),
1502 op2a, op2b);
1503 else
1504 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1505 return canonicalize_bool (t, invert);
1508 /* Try to simplify the AND of the ssa variable defined by the assignment
1509 STMT with the comparison specified by (OP2A CODE2 OP2B).
1510 Return NULL_EXPR if we can't simplify this to a single expression. */
1512 static tree
1513 and_var_with_comparison_1 (gimple stmt,
1514 enum tree_code code2, tree op2a, tree op2b)
1516 tree var = gimple_assign_lhs (stmt);
1517 tree true_test_var = NULL_TREE;
1518 tree false_test_var = NULL_TREE;
1519 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1521 /* Check for identities like (var AND (var == 0)) => false. */
1522 if (TREE_CODE (op2a) == SSA_NAME
1523 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) / (op2a == 1) tests op2a true ...  */
1525 if ((code2 == NE_EXPR && integer_zerop (op2b))
1526 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1528 true_test_var = op2a;
1529 if (var == true_test_var)
1530 return var;
/* ... while (op2a == 0) / (op2a != 1) tests op2a false.  */
1532 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1533 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1535 false_test_var = op2a;
1536 if (var == false_test_var)
1537 return boolean_false_node;
1541 /* If the definition is a comparison, recurse on it. */
1542 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1544 tree t = and_comparisons_1 (innercode,
1545 gimple_assign_rhs1 (stmt),
1546 gimple_assign_rhs2 (stmt),
1547 code2,
1548 op2a,
1549 op2b);
1550 if (t)
1551 return t;
1554 /* If the definition is an AND or OR expression, we may be able to
1555 simplify by reassociating. */
1556 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1557 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1559 tree inner1 = gimple_assign_rhs1 (stmt);
1560 tree inner2 = gimple_assign_rhs2 (stmt);
1561 gimple s;
1562 tree t;
1563 tree partial = NULL_TREE;
1564 bool is_and = (innercode == BIT_AND_EXPR);
1566 /* Check for boolean identities that don't require recursive examination
1567 of inner1/inner2:
1568 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1569 inner1 AND (inner1 OR inner2) => inner1
1570 !inner1 AND (inner1 AND inner2) => false
1571 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1572 Likewise for similar cases involving inner2. */
1573 if (inner1 == true_test_var)
1574 return (is_and ? var : inner1);
1575 else if (inner2 == true_test_var)
1576 return (is_and ? var : inner2);
1577 else if (inner1 == false_test_var)
1578 return (is_and
1579 ? boolean_false_node
1580 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1581 else if (inner2 == false_test_var)
1582 return (is_and
1583 ? boolean_false_node
1584 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1586 /* Next, redistribute/reassociate the AND across the inner tests.
1587 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1588 if (TREE_CODE (inner1) == SSA_NAME
1589 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1590 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1591 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1592 gimple_assign_rhs1 (s),
1593 gimple_assign_rhs2 (s),
1594 code2, op2a, op2b)))
1596 /* Handle the AND case, where we are reassociating:
1597 (inner1 AND inner2) AND (op2a code2 op2b)
1598 => (t AND inner2)
1599 If the partial result t is a constant, we win. Otherwise
1600 continue on to try reassociating with the other inner test. */
1601 if (is_and)
1603 if (integer_onep (t))
1604 return inner2;
1605 else if (integer_zerop (t))
1606 return boolean_false_node;
1609 /* Handle the OR case, where we are redistributing:
1610 (inner1 OR inner2) AND (op2a code2 op2b)
1611 => (t OR (inner2 AND (op2a code2 op2b))) */
1612 else if (integer_onep (t))
1613 return boolean_true_node;
1615 /* Save partial result for later. */
1616 partial = t;
1619 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1620 if (TREE_CODE (inner2) == SSA_NAME
1621 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1622 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1623 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1624 gimple_assign_rhs1 (s),
1625 gimple_assign_rhs2 (s),
1626 code2, op2a, op2b)))
1628 /* Handle the AND case, where we are reassociating:
1629 (inner1 AND inner2) AND (op2a code2 op2b)
1630 => (inner1 AND t) */
1631 if (is_and)
1633 if (integer_onep (t))
1634 return inner1;
1635 else if (integer_zerop (t))
1636 return boolean_false_node;
1637 /* If both are the same, we can apply the identity
1638 (x AND x) == x. */
1639 else if (partial && same_bool_result_p (t, partial))
1640 return t;
1643 /* Handle the OR case, where we are redistributing:
1644 (inner1 OR inner2) AND (op2a code2 op2b)
1645 => (t OR (inner1 AND (op2a code2 op2b)))
1646 => (t OR partial) */
1647 else
1649 if (integer_onep (t))
1650 return boolean_true_node;
1651 else if (partial)
1653 /* We already got a simplification for the other
1654 operand to the redistributed OR expression. The
1655 interesting case is when at least one is false.
1656 Or, if both are the same, we can apply the identity
1657 (x OR x) == x. */
1658 if (integer_zerop (partial))
1659 return t;
1660 else if (integer_zerop (t))
1661 return partial;
1662 else if (same_bool_result_p (t, partial))
1663 return t;
1668 return NULL_TREE;
1671 /* Try to simplify the AND of two comparisons defined by
1672 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1673 If this can be done without constructing an intermediate value,
1674 return the resulting tree; otherwise NULL_TREE is returned.
1675 This function is deliberately asymmetric as it recurses on SSA_DEFs
1676 in the first comparison but not the second. */
1678 static tree
1679 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1680 enum tree_code code2, tree op2a, tree op2b)
1682 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1683 if (operand_equal_p (op1a, op2a, 0)
1684 && operand_equal_p (op1b, op2b, 0))
1686 /* Result will be either NULL_TREE, or a combined comparison. */
1687 tree t = combine_comparisons (UNKNOWN_LOCATION,
1688 TRUTH_ANDIF_EXPR, code1, code2,
1689 boolean_type_node, op1a, op1b);
1690 if (t)
1691 return t;
1694 /* Likewise the swapped case of the above. */
1695 if (operand_equal_p (op1a, op2b, 0)
1696 && operand_equal_p (op1b, op2a, 0))
1698 /* Result will be either NULL_TREE, or a combined comparison. */
1699 tree t = combine_comparisons (UNKNOWN_LOCATION,
1700 TRUTH_ANDIF_EXPR, code1,
1701 swap_tree_comparison (code2),
1702 boolean_type_node, op1a, op1b);
1703 if (t)
1704 return t;
1707 /* If both comparisons are of the same value against constants, we might
1708 be able to merge them. */
1709 if (operand_equal_p (op1a, op2a, 0)
1710 && TREE_CODE (op1b) == INTEGER_CST
1711 && TREE_CODE (op2b) == INTEGER_CST)
/* CMP is the sign of op1b - op2b: negative, zero, or positive.  */
1713 int cmp = tree_int_cst_compare (op1b, op2b);
1715 /* If we have (op1a == op1b), we should either be able to
1716 return that or FALSE, depending on whether the constant op1b
1717 also satisfies the other comparison against op2b. */
1718 if (code1 == EQ_EXPR)
1720 bool done = true;
1721 bool val;
1722 switch (code2)
1724 case EQ_EXPR: val = (cmp == 0); break;
1725 case NE_EXPR: val = (cmp != 0); break;
1726 case LT_EXPR: val = (cmp < 0); break;
1727 case GT_EXPR: val = (cmp > 0); break;
1728 case LE_EXPR: val = (cmp <= 0); break;
1729 case GE_EXPR: val = (cmp >= 0); break;
1730 default: done = false;
1732 if (done)
1734 if (val)
1735 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1736 else
1737 return boolean_false_node;
1740 /* Likewise if the second comparison is an == comparison. */
1741 else if (code2 == EQ_EXPR)
1743 bool done = true;
1744 bool val;
1745 switch (code1)
1747 case EQ_EXPR: val = (cmp == 0); break;
1748 case NE_EXPR: val = (cmp != 0); break;
1749 case LT_EXPR: val = (cmp > 0); break;
1750 case GT_EXPR: val = (cmp < 0); break;
1751 case LE_EXPR: val = (cmp >= 0); break;
1752 case GE_EXPR: val = (cmp <= 0); break;
1753 default: done = false;
1755 if (done)
1757 if (val)
1758 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1759 else
1760 return boolean_false_node;
1764 /* Same business with inequality tests. */
1765 else if (code1 == NE_EXPR)
1767 bool val;
1768 switch (code2)
1770 case EQ_EXPR: val = (cmp != 0); break;
1771 case NE_EXPR: val = (cmp == 0); break;
1772 case LT_EXPR: val = (cmp >= 0); break;
1773 case GT_EXPR: val = (cmp <= 0); break;
1774 case LE_EXPR: val = (cmp > 0); break;
1775 case GE_EXPR: val = (cmp < 0); break;
1776 default:
1777 val = false;
/* VAL true means the NE test is implied by the second comparison,
   so the second comparison alone is the answer.  */
1779 if (val)
1780 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1782 else if (code2 == NE_EXPR)
1784 bool val;
1785 switch (code1)
1787 case EQ_EXPR: val = (cmp == 0); break;
1788 case NE_EXPR: val = (cmp != 0); break;
1789 case LT_EXPR: val = (cmp <= 0); break;
1790 case GT_EXPR: val = (cmp >= 0); break;
1791 case LE_EXPR: val = (cmp < 0); break;
1792 case GE_EXPR: val = (cmp > 0); break;
1793 default:
1794 val = false;
1796 if (val)
1797 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1800 /* Choose the more restrictive of two < or <= comparisons. */
1801 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1802 && (code2 == LT_EXPR || code2 == LE_EXPR))
1804 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1805 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1806 else
1807 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1810 /* Likewise choose the more restrictive of two > or >= comparisons. */
1811 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1812 && (code2 == GT_EXPR || code2 == GE_EXPR))
1814 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1815 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1816 else
1817 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1820 /* Check for singleton ranges. */
1821 else if (cmp == 0
1822 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1823 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1824 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1826 /* Check for disjoint ranges. */
1827 else if (cmp <= 0
1828 && (code1 == LT_EXPR || code1 == LE_EXPR)
1829 && (code2 == GT_EXPR || code2 == GE_EXPR))
1830 return boolean_false_node;
1831 else if (cmp >= 0
1832 && (code1 == GT_EXPR || code1 == GE_EXPR)
1833 && (code2 == LT_EXPR || code2 == LE_EXPR))
1834 return boolean_false_node;
1837 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1838 NAME's definition is a truth value. See if there are any simplifications
1839 that can be done against the NAME's definition. */
1840 if (TREE_CODE (op1a) == SSA_NAME
1841 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1842 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is true when the first comparison tests NAME false.  */
1844 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1845 || (code1 == NE_EXPR && integer_onep (op1b)));
1846 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1847 switch (gimple_code (stmt))
1849 case GIMPLE_ASSIGN:
1850 /* Try to simplify by copy-propagating the definition. */
1851 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1853 case GIMPLE_PHI:
1854 /* If every argument to the PHI produces the same result when
1855 ANDed with the second comparison, we win.
1856 Do not do this unless the type is bool since we need a bool
1857 result here anyway. */
1858 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1860 tree result = NULL_TREE;
1861 unsigned i;
1862 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1864 tree arg = gimple_phi_arg_def (stmt, i);
1866 /* If this PHI has itself as an argument, ignore it.
1867 If all the other args produce the same result,
1868 we're still OK. */
1869 if (arg == gimple_phi_result (stmt))
1870 continue;
1871 else if (TREE_CODE (arg) == INTEGER_CST)
1873 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1875 if (!result)
1876 result = boolean_false_node;
1877 else if (!integer_zerop (result))
1878 return NULL_TREE;
1880 else if (!result)
1881 result = fold_build2 (code2, boolean_type_node,
1882 op2a, op2b);
1883 else if (!same_bool_comparison_p (result,
1884 code2, op2a, op2b))
1885 return NULL_TREE;
1887 else if (TREE_CODE (arg) == SSA_NAME
1888 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1890 tree temp;
1891 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1892 /* In simple cases we can look through PHI nodes,
1893 but we have to be careful with loops.
1894 See PR49073. */
1895 if (! dom_info_available_p (CDI_DOMINATORS)
1896 || gimple_bb (def_stmt) == gimple_bb (stmt)
1897 || dominated_by_p (CDI_DOMINATORS,
1898 gimple_bb (def_stmt),
1899 gimple_bb (stmt)))
1900 return NULL_TREE;
1901 temp = and_var_with_comparison (arg, invert, code2,
1902 op2a, op2b);
1903 if (!temp)
1904 return NULL_TREE;
1905 else if (!result)
1906 result = temp;
1907 else if (!same_bool_result_p (result, temp))
1908 return NULL_TREE;
1910 else
1911 return NULL_TREE;
1913 return result;
1916 default:
1917 break;
1920 return NULL_TREE;
1923 /* Try to simplify the AND of two comparisons, specified by
1924 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1925 If this can be simplified to a single expression (without requiring
1926 introducing more SSA variables to hold intermediate values),
1927 return the resulting tree. Otherwise return NULL_TREE.
1928 If the result expression is non-null, it has boolean type. */
1930 tree
1931 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1932 enum tree_code code2, tree op2a, tree op2b)
1934 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1935 if (t)
1936 return t;
1937 else
1938 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1941 /* Helper function for or_comparisons_1: try to simplify the OR of the
1942 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1943 If INVERT is true, invert the value of VAR before doing the OR.
1944 Return NULL_EXPR if we can't simplify this to a single expression. */
1946 static tree
1947 or_var_with_comparison (tree var, bool invert,
1948 enum tree_code code2, tree op2a, tree op2b)
1950 tree t;
1951 gimple stmt = SSA_NAME_DEF_STMT (var);
1953 /* We can only deal with variables whose definitions are assignments. */
1954 if (!is_gimple_assign (stmt))
1955 return NULL_TREE;
1957 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1958 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1959 Then we only have to consider the simpler non-inverted cases. */
1960 if (invert)
1961 t = and_var_with_comparison_1 (stmt,
1962 invert_tree_comparison (code2, false),
1963 op2a, op2b);
1964 else
1965 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1966 return canonicalize_bool (t, invert);
1969 /* Try to simplify the OR of the ssa variable defined by the assignment
1970 STMT with the comparison specified by (OP2A CODE2 OP2B).
1971 Return NULL_EXPR if we can't simplify this to a single expression. */
1973 static tree
1974 or_var_with_comparison_1 (gimple stmt,
1975 enum tree_code code2, tree op2a, tree op2b)
1977 tree var = gimple_assign_lhs (stmt);
1978 tree true_test_var = NULL_TREE;
1979 tree false_test_var = NULL_TREE;
1980 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1982 /* Check for identities like (var OR (var != 0)) => true . */
1983 if (TREE_CODE (op2a) == SSA_NAME
1984 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) / (op2a == 1) tests op2a true ...  */
1986 if ((code2 == NE_EXPR && integer_zerop (op2b))
1987 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1989 true_test_var = op2a;
1990 if (var == true_test_var)
1991 return var;
/* ... while (op2a == 0) / (op2a != 1) tests op2a false.  */
1993 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1994 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1996 false_test_var = op2a;
1997 if (var == false_test_var)
1998 return boolean_true_node;
2002 /* If the definition is a comparison, recurse on it. */
2003 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2005 tree t = or_comparisons_1 (innercode,
2006 gimple_assign_rhs1 (stmt),
2007 gimple_assign_rhs2 (stmt),
2008 code2,
2009 op2a,
2010 op2b);
2011 if (t)
2012 return t;
2015 /* If the definition is an AND or OR expression, we may be able to
2016 simplify by reassociating. */
2017 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2018 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2020 tree inner1 = gimple_assign_rhs1 (stmt);
2021 tree inner2 = gimple_assign_rhs2 (stmt);
2022 gimple s;
2023 tree t;
2024 tree partial = NULL_TREE;
2025 bool is_or = (innercode == BIT_IOR_EXPR);
2027 /* Check for boolean identities that don't require recursive examination
2028 of inner1/inner2:
2029 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2030 inner1 OR (inner1 AND inner2) => inner1
2031 !inner1 OR (inner1 OR inner2) => true
2032 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
Likewise for similar cases involving inner2. */
2034 if (inner1 == true_test_var)
2035 return (is_or ? var : inner1);
2036 else if (inner2 == true_test_var)
2037 return (is_or ? var : inner2);
2038 else if (inner1 == false_test_var)
2039 return (is_or
2040 ? boolean_true_node
2041 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2042 else if (inner2 == false_test_var)
2043 return (is_or
2044 ? boolean_true_node
2045 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2047 /* Next, redistribute/reassociate the OR across the inner tests.
2048 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2049 if (TREE_CODE (inner1) == SSA_NAME
2050 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2051 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2052 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2053 gimple_assign_rhs1 (s),
2054 gimple_assign_rhs2 (s),
2055 code2, op2a, op2b)))
2057 /* Handle the OR case, where we are reassociating:
2058 (inner1 OR inner2) OR (op2a code2 op2b)
2059 => (t OR inner2)
2060 If the partial result t is a constant, we win. Otherwise
2061 continue on to try reassociating with the other inner test. */
2062 if (is_or)
2064 if (integer_onep (t))
2065 return boolean_true_node;
2066 else if (integer_zerop (t))
2067 return inner2;
2070 /* Handle the AND case, where we are redistributing:
2071 (inner1 AND inner2) OR (op2a code2 op2b)
2072 => (t AND (inner2 OR (op2a code op2b))) */
2073 else if (integer_zerop (t))
2074 return boolean_false_node;
2076 /* Save partial result for later. */
2077 partial = t;
2080 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2081 if (TREE_CODE (inner2) == SSA_NAME
2082 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2083 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2084 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2085 gimple_assign_rhs1 (s),
2086 gimple_assign_rhs2 (s),
2087 code2, op2a, op2b)))
2089 /* Handle the OR case, where we are reassociating:
2090 (inner1 OR inner2) OR (op2a code2 op2b)
2091 => (inner1 OR t)
2092 => (t OR partial) */
2093 if (is_or)
2095 if (integer_zerop (t))
2096 return inner1;
2097 else if (integer_onep (t))
2098 return boolean_true_node;
2099 /* If both are the same, we can apply the identity
2100 (x OR x) == x. */
2101 else if (partial && same_bool_result_p (t, partial))
2102 return t;
2105 /* Handle the AND case, where we are redistributing:
2106 (inner1 AND inner2) OR (op2a code2 op2b)
2107 => (t AND (inner1 OR (op2a code2 op2b)))
2108 => (t AND partial) */
2109 else
2111 if (integer_zerop (t))
2112 return boolean_false_node;
2113 else if (partial)
2115 /* We already got a simplification for the other
2116 operand to the redistributed AND expression. The
2117 interesting case is when at least one is true.
2118 Or, if both are the same, we can apply the identity
2119 (x AND x) == x. */
2120 if (integer_onep (partial))
2121 return t;
2122 else if (integer_onep (t))
2123 return partial;
2124 else if (same_bool_result_p (t, partial))
2125 return t;
2130 return NULL_TREE;
2133 /* Try to simplify the OR of two comparisons defined by
2134 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2135 If this can be done without constructing an intermediate value,
2136 return the resulting tree; otherwise NULL_TREE is returned.
2137 This function is deliberately asymmetric as it recurses on SSA_DEFs
2138 in the first comparison but not the second. */
2140 static tree
2141 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2142 enum tree_code code2, tree op2a, tree op2b)
2144 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2145 if (operand_equal_p (op1a, op2a, 0)
2146 && operand_equal_p (op1b, op2b, 0))
2148 /* Result will be either NULL_TREE, or a combined comparison. */
2149 tree t = combine_comparisons (UNKNOWN_LOCATION,
2150 TRUTH_ORIF_EXPR, code1, code2,
2151 boolean_type_node, op1a, op1b);
2152 if (t)
2153 return t;
2156 /* Likewise the swapped case of the above. */
2157 if (operand_equal_p (op1a, op2b, 0)
2158 && operand_equal_p (op1b, op2a, 0))
2160 /* Result will be either NULL_TREE, or a combined comparison. */
2161 tree t = combine_comparisons (UNKNOWN_LOCATION,
2162 TRUTH_ORIF_EXPR, code1,
2163 swap_tree_comparison (code2),
2164 boolean_type_node, op1a, op1b);
2165 if (t)
2166 return t;
2169 /* If both comparisons are of the same value against constants, we might
2170 be able to merge them. */
2171 if (operand_equal_p (op1a, op2a, 0)
2172 && TREE_CODE (op1b) == INTEGER_CST
2173 && TREE_CODE (op2b) == INTEGER_CST)
2175 int cmp = tree_int_cst_compare (op1b, op2b);
2177 /* If we have (op1a != op1b), we should either be able to
2178 return that or TRUE, depending on whether the constant op1b
2179 also satisfies the other comparison against op2b. */
2180 if (code1 == NE_EXPR)
2182 bool done = true;
2183 bool val;
2184 switch (code2)
2186 case EQ_EXPR: val = (cmp == 0); break;
2187 case NE_EXPR: val = (cmp != 0); break;
2188 case LT_EXPR: val = (cmp < 0); break;
2189 case GT_EXPR: val = (cmp > 0); break;
2190 case LE_EXPR: val = (cmp <= 0); break;
2191 case GE_EXPR: val = (cmp >= 0); break;
2192 default: done = false;
2194 if (done)
2196 if (val)
2197 return boolean_true_node;
2198 else
2199 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2202 /* Likewise if the second comparison is a != comparison. */
2203 else if (code2 == NE_EXPR)
2205 bool done = true;
2206 bool val;
2207 switch (code1)
2209 case EQ_EXPR: val = (cmp == 0); break;
2210 case NE_EXPR: val = (cmp != 0); break;
2211 case LT_EXPR: val = (cmp > 0); break;
2212 case GT_EXPR: val = (cmp < 0); break;
2213 case LE_EXPR: val = (cmp >= 0); break;
2214 case GE_EXPR: val = (cmp <= 0); break;
2215 default: done = false;
2217 if (done)
2219 if (val)
2220 return boolean_true_node;
2221 else
2222 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2226 /* See if an equality test is redundant with the other comparison. */
2227 else if (code1 == EQ_EXPR)
2229 bool val;
2230 switch (code2)
2232 case EQ_EXPR: val = (cmp == 0); break;
2233 case NE_EXPR: val = (cmp != 0); break;
2234 case LT_EXPR: val = (cmp < 0); break;
2235 case GT_EXPR: val = (cmp > 0); break;
2236 case LE_EXPR: val = (cmp <= 0); break;
2237 case GE_EXPR: val = (cmp >= 0); break;
2238 default:
2239 val = false;
2241 if (val)
2242 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2244 else if (code2 == EQ_EXPR)
2246 bool val;
2247 switch (code1)
2249 case EQ_EXPR: val = (cmp == 0); break;
2250 case NE_EXPR: val = (cmp != 0); break;
2251 case LT_EXPR: val = (cmp > 0); break;
2252 case GT_EXPR: val = (cmp < 0); break;
2253 case LE_EXPR: val = (cmp >= 0); break;
2254 case GE_EXPR: val = (cmp <= 0); break;
2255 default:
2256 val = false;
2258 if (val)
2259 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2262 /* Chose the less restrictive of two < or <= comparisons. */
2263 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2264 && (code2 == LT_EXPR || code2 == LE_EXPR))
2266 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2267 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2268 else
2269 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2272 /* Likewise chose the less restrictive of two > or >= comparisons. */
2273 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2274 && (code2 == GT_EXPR || code2 == GE_EXPR))
2276 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2277 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2278 else
2279 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2282 /* Check for singleton ranges. */
2283 else if (cmp == 0
2284 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2285 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2286 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2288 /* Check for less/greater pairs that don't restrict the range at all. */
2289 else if (cmp >= 0
2290 && (code1 == LT_EXPR || code1 == LE_EXPR)
2291 && (code2 == GT_EXPR || code2 == GE_EXPR))
2292 return boolean_true_node;
2293 else if (cmp <= 0
2294 && (code1 == GT_EXPR || code1 == GE_EXPR)
2295 && (code2 == LT_EXPR || code2 == LE_EXPR))
2296 return boolean_true_node;
2299 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2300 NAME's definition is a truth value. See if there are any simplifications
2301 that can be done against the NAME's definition. */
2302 if (TREE_CODE (op1a) == SSA_NAME
2303 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2304 && (integer_zerop (op1b) || integer_onep (op1b)))
2306 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2307 || (code1 == NE_EXPR && integer_onep (op1b)));
2308 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2309 switch (gimple_code (stmt))
2311 case GIMPLE_ASSIGN:
2312 /* Try to simplify by copy-propagating the definition. */
2313 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2315 case GIMPLE_PHI:
2316 /* If every argument to the PHI produces the same result when
2317 ORed with the second comparison, we win.
2318 Do not do this unless the type is bool since we need a bool
2319 result here anyway. */
2320 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2322 tree result = NULL_TREE;
2323 unsigned i;
2324 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2326 tree arg = gimple_phi_arg_def (stmt, i);
2328 /* If this PHI has itself as an argument, ignore it.
2329 If all the other args produce the same result,
2330 we're still OK. */
2331 if (arg == gimple_phi_result (stmt))
2332 continue;
2333 else if (TREE_CODE (arg) == INTEGER_CST)
2335 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2337 if (!result)
2338 result = boolean_true_node;
2339 else if (!integer_onep (result))
2340 return NULL_TREE;
2342 else if (!result)
2343 result = fold_build2 (code2, boolean_type_node,
2344 op2a, op2b);
2345 else if (!same_bool_comparison_p (result,
2346 code2, op2a, op2b))
2347 return NULL_TREE;
2349 else if (TREE_CODE (arg) == SSA_NAME
2350 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2352 tree temp;
2353 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2354 /* In simple cases we can look through PHI nodes,
2355 but we have to be careful with loops.
2356 See PR49073. */
2357 if (! dom_info_available_p (CDI_DOMINATORS)
2358 || gimple_bb (def_stmt) == gimple_bb (stmt)
2359 || dominated_by_p (CDI_DOMINATORS,
2360 gimple_bb (def_stmt),
2361 gimple_bb (stmt)))
2362 return NULL_TREE;
2363 temp = or_var_with_comparison (arg, invert, code2,
2364 op2a, op2b);
2365 if (!temp)
2366 return NULL_TREE;
2367 else if (!result)
2368 result = temp;
2369 else if (!same_bool_result_p (result, temp))
2370 return NULL_TREE;
2372 else
2373 return NULL_TREE;
2375 return result;
2378 default:
2379 break;
2382 return NULL_TREE;
2385 /* Try to simplify the OR of two comparisons, specified by
2386 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2387 If this can be simplified to a single expression (without requiring
2388 introducing more SSA variables to hold intermediate values),
2389 return the resulting tree. Otherwise return NULL_TREE.
2390 If the result expression is non-null, it has boolean type. */
2392 tree
2393 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2394 enum tree_code code2, tree op2a, tree op2b)
2396 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2397 if (t)
2398 return t;
2399 else
2400 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2404 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2406 Either NULL_TREE, a simplified but non-constant or a constant
2407 is returned.
2409 ??? This should go into a gimple-fold-inline.h file to be eventually
2410 privatized with the single valueize function used in the various TUs
2411 to avoid the indirect function call overhead. */
2413 tree
2414 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2416 location_t loc = gimple_location (stmt);
2417 switch (gimple_code (stmt))
2419 case GIMPLE_ASSIGN:
2421 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2423 switch (get_gimple_rhs_class (subcode))
2425 case GIMPLE_SINGLE_RHS:
2427 tree rhs = gimple_assign_rhs1 (stmt);
2428 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2430 if (TREE_CODE (rhs) == SSA_NAME)
2432 /* If the RHS is an SSA_NAME, return its known constant value,
2433 if any. */
2434 return (*valueize) (rhs);
2436 /* Handle propagating invariant addresses into address
2437 operations. */
2438 else if (TREE_CODE (rhs) == ADDR_EXPR
2439 && !is_gimple_min_invariant (rhs))
2441 HOST_WIDE_INT offset;
2442 tree base;
2443 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2444 &offset,
2445 valueize);
2446 if (base
2447 && (CONSTANT_CLASS_P (base)
2448 || decl_address_invariant_p (base)))
2449 return build_invariant_address (TREE_TYPE (rhs),
2450 base, offset);
2452 else if (TREE_CODE (rhs) == CONSTRUCTOR
2453 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2454 && (CONSTRUCTOR_NELTS (rhs)
2455 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2457 unsigned i;
2458 tree val, list;
2460 list = NULL_TREE;
2461 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2463 val = (*valueize) (val);
2464 if (TREE_CODE (val) == INTEGER_CST
2465 || TREE_CODE (val) == REAL_CST
2466 || TREE_CODE (val) == FIXED_CST)
2467 list = tree_cons (NULL_TREE, val, list);
2468 else
2469 return NULL_TREE;
2472 return build_vector (TREE_TYPE (rhs), nreverse (list));
2475 if (kind == tcc_reference)
2477 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2478 || TREE_CODE (rhs) == REALPART_EXPR
2479 || TREE_CODE (rhs) == IMAGPART_EXPR)
2480 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2482 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2483 return fold_unary_loc (EXPR_LOCATION (rhs),
2484 TREE_CODE (rhs),
2485 TREE_TYPE (rhs), val);
2487 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2488 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2490 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2491 return fold_ternary_loc (EXPR_LOCATION (rhs),
2492 TREE_CODE (rhs),
2493 TREE_TYPE (rhs), val,
2494 TREE_OPERAND (rhs, 1),
2495 TREE_OPERAND (rhs, 2));
2497 else if (TREE_CODE (rhs) == MEM_REF
2498 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2500 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2501 if (TREE_CODE (val) == ADDR_EXPR
2502 && is_gimple_min_invariant (val))
2504 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2505 unshare_expr (val),
2506 TREE_OPERAND (rhs, 1));
2507 if (tem)
2508 rhs = tem;
2511 return fold_const_aggregate_ref_1 (rhs, valueize);
2513 else if (kind == tcc_declaration)
2514 return get_symbol_constant_value (rhs);
2515 return rhs;
2518 case GIMPLE_UNARY_RHS:
2520 /* Handle unary operators that can appear in GIMPLE form.
2521 Note that we know the single operand must be a constant,
2522 so this should almost always return a simplified RHS. */
2523 tree lhs = gimple_assign_lhs (stmt);
2524 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2526 /* Conversions are useless for CCP purposes if they are
2527 value-preserving. Thus the restrictions that
2528 useless_type_conversion_p places for restrict qualification
2529 of pointer types should not apply here.
2530 Substitution later will only substitute to allowed places. */
2531 if (CONVERT_EXPR_CODE_P (subcode)
2532 && POINTER_TYPE_P (TREE_TYPE (lhs))
2533 && POINTER_TYPE_P (TREE_TYPE (op0))
2534 && (TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2535 == TYPE_ADDR_SPACE (TREE_TYPE (op0))))
2536 return op0;
2538 return
2539 fold_unary_ignore_overflow_loc (loc, subcode,
2540 gimple_expr_type (stmt), op0);
2543 case GIMPLE_BINARY_RHS:
2545 /* Handle binary operators that can appear in GIMPLE form. */
2546 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2547 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2549 /* Translate &x + CST into an invariant form suitable for
2550 further propagation. */
2551 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2552 && TREE_CODE (op0) == ADDR_EXPR
2553 && TREE_CODE (op1) == INTEGER_CST)
2555 tree off = fold_convert (ptr_type_node, op1);
2556 return build_fold_addr_expr_loc
2557 (loc,
2558 fold_build2 (MEM_REF,
2559 TREE_TYPE (TREE_TYPE (op0)),
2560 unshare_expr (op0), off));
2563 return fold_binary_loc (loc, subcode,
2564 gimple_expr_type (stmt), op0, op1);
2567 case GIMPLE_TERNARY_RHS:
2569 /* Handle ternary operators that can appear in GIMPLE form. */
2570 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2571 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2572 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2574 /* Fold embedded expressions in ternary codes. */
2575 if ((subcode == COND_EXPR
2576 || subcode == VEC_COND_EXPR)
2577 && COMPARISON_CLASS_P (op0))
2579 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2580 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2581 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2582 TREE_TYPE (op0), op00, op01);
2583 if (tem)
2584 op0 = tem;
2587 return fold_ternary_loc (loc, subcode,
2588 gimple_expr_type (stmt), op0, op1, op2);
2591 default:
2592 gcc_unreachable ();
2596 case GIMPLE_CALL:
2598 tree fn;
2600 if (gimple_call_internal_p (stmt))
2601 /* No folding yet for these functions. */
2602 return NULL_TREE;
2604 fn = (*valueize) (gimple_call_fn (stmt));
2605 if (TREE_CODE (fn) == ADDR_EXPR
2606 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2607 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2609 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2610 tree call, retval;
2611 unsigned i;
2612 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2613 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2614 call = build_call_array_loc (loc,
2615 gimple_call_return_type (stmt),
2616 fn, gimple_call_num_args (stmt), args);
2617 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2618 if (retval)
2619 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2620 STRIP_NOPS (retval);
2621 return retval;
2623 return NULL_TREE;
2626 default:
2627 return NULL_TREE;
2631 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2632 Returns NULL_TREE if folding to a constant is not possible, otherwise
2633 returns a constant according to is_gimple_min_invariant. */
2635 tree
2636 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2638 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2639 if (res && is_gimple_min_invariant (res))
2640 return res;
2641 return NULL_TREE;
2645 /* The following set of functions are supposed to fold references using
2646 their constant initializers. */
2648 static tree fold_ctor_reference (tree type, tree ctor,
2649 unsigned HOST_WIDE_INT offset,
2650 unsigned HOST_WIDE_INT size);
2652 /* See if we can find constructor defining value of BASE.
2653 When we know the consructor with constant offset (such as
2654 base is array[40] and we do know constructor of array), then
2655 BIT_OFFSET is adjusted accordingly.
2657 As a special case, return error_mark_node when constructor
2658 is not explicitly available, but it is known to be zero
2659 such as 'static const int a;'. */
2660 static tree
2661 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2662 tree (*valueize)(tree))
2664 HOST_WIDE_INT bit_offset2, size, max_size;
2665 if (TREE_CODE (base) == MEM_REF)
2667 if (!integer_zerop (TREE_OPERAND (base, 1)))
2669 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2670 return NULL_TREE;
2671 *bit_offset += (mem_ref_offset (base).low
2672 * BITS_PER_UNIT);
2675 if (valueize
2676 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2677 base = valueize (TREE_OPERAND (base, 0));
2678 if (!base || TREE_CODE (base) != ADDR_EXPR)
2679 return NULL_TREE;
2680 base = TREE_OPERAND (base, 0);
2683 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2684 DECL_INITIAL. If BASE is a nested reference into another
2685 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2686 the inner reference. */
2687 switch (TREE_CODE (base))
2689 case VAR_DECL:
2690 if (!const_value_known_p (base))
2691 return NULL_TREE;
2693 /* Fallthru. */
2694 case CONST_DECL:
2695 if (!DECL_INITIAL (base)
2696 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2697 return error_mark_node;
2698 return DECL_INITIAL (base);
2700 case ARRAY_REF:
2701 case COMPONENT_REF:
2702 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2703 if (max_size == -1 || size != max_size)
2704 return NULL_TREE;
2705 *bit_offset += bit_offset2;
2706 return get_base_constructor (base, bit_offset, valueize);
2708 case STRING_CST:
2709 case CONSTRUCTOR:
2710 return base;
2712 default:
2713 return NULL_TREE;
2717 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2718 to the memory at bit OFFSET.
2720 We do only simple job of folding byte accesses. */
2722 static tree
2723 fold_string_cst_ctor_reference (tree type, tree ctor,
2724 unsigned HOST_WIDE_INT offset,
2725 unsigned HOST_WIDE_INT size)
2727 if (INTEGRAL_TYPE_P (type)
2728 && (TYPE_MODE (type)
2729 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2730 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2731 == MODE_INT)
2732 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2733 && size == BITS_PER_UNIT
2734 && !(offset % BITS_PER_UNIT))
2736 offset /= BITS_PER_UNIT;
2737 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2738 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2739 [offset]));
2740 /* Folding
2741 const char a[20]="hello";
2742 return a[10];
2744 might lead to offset greater than string length. In this case we
2745 know value is either initialized to 0 or out of bounds. Return 0
2746 in both cases. */
2747 return build_zero_cst (type);
2749 return NULL_TREE;
2752 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2753 SIZE to the memory at bit OFFSET. */
2755 static tree
2756 fold_array_ctor_reference (tree type, tree ctor,
2757 unsigned HOST_WIDE_INT offset,
2758 unsigned HOST_WIDE_INT size)
2760 unsigned HOST_WIDE_INT cnt;
2761 tree cfield, cval;
2762 double_int low_bound, elt_size;
2763 double_int index, max_index;
2764 double_int access_index;
2765 tree domain_type = NULL_TREE;
2766 HOST_WIDE_INT inner_offset;
2768 /* Compute low bound and elt size. */
2769 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2770 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2771 if (domain_type && TYPE_MIN_VALUE (domain_type))
2773 /* Static constructors for variably sized objects makes no sense. */
2774 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2775 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2777 else
2778 low_bound = double_int_zero;
2779 /* Static constructors for variably sized objects makes no sense. */
2780 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2781 == INTEGER_CST);
2782 elt_size =
2783 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2786 /* We can handle only constantly sized accesses that are known to not
2787 be larger than size of array element. */
2788 if (!TYPE_SIZE_UNIT (type)
2789 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2790 || double_int_cmp (elt_size,
2791 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2792 return NULL_TREE;
2794 /* Compute the array index we look for. */
2795 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2796 elt_size, TRUNC_DIV_EXPR);
2797 access_index = double_int_add (access_index, low_bound);
2799 /* And offset within the access. */
2800 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2802 /* See if the array field is large enough to span whole access. We do not
2803 care to fold accesses spanning multiple array indexes. */
2804 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
2805 return NULL_TREE;
2807 index = double_int_sub (low_bound, double_int_one);
2808 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2810 /* Array constructor might explicitely set index, or specify range
2811 or leave index NULL meaning that it is next index after previous
2812 one. */
2813 if (cfield)
2815 if (TREE_CODE (cfield) == INTEGER_CST)
2816 max_index = index = tree_to_double_int (cfield);
2817 else
2819 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2820 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2821 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2824 else
2825 max_index = index = double_int_add (index, double_int_one);
2827 /* Do we have match? */
2828 if (double_int_cmp (access_index, index, 1) >= 0
2829 && double_int_cmp (access_index, max_index, 1) <= 0)
2830 return fold_ctor_reference (type, cval, inner_offset, size);
2832 /* When memory is not explicitely mentioned in constructor,
2833 it is 0 (or out of range). */
2834 return build_zero_cst (type);
2837 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2838 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2840 static tree
2841 fold_nonarray_ctor_reference (tree type, tree ctor,
2842 unsigned HOST_WIDE_INT offset,
2843 unsigned HOST_WIDE_INT size)
2845 unsigned HOST_WIDE_INT cnt;
2846 tree cfield, cval;
2848 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2849 cval)
2851 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2852 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2853 tree field_size = DECL_SIZE (cfield);
2854 double_int bitoffset;
2855 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2856 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
2857 double_int bitoffset_end, access_end;
2859 /* Variable sized objects in static constructors makes no sense,
2860 but field_size can be NULL for flexible array members. */
2861 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2862 && TREE_CODE (byte_offset) == INTEGER_CST
2863 && (field_size != NULL_TREE
2864 ? TREE_CODE (field_size) == INTEGER_CST
2865 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2867 /* Compute bit offset of the field. */
2868 bitoffset = double_int_add (tree_to_double_int (field_offset),
2869 double_int_mul (byte_offset_cst,
2870 bits_per_unit_cst));
2871 /* Compute bit offset where the field ends. */
2872 if (field_size != NULL_TREE)
2873 bitoffset_end = double_int_add (bitoffset,
2874 tree_to_double_int (field_size));
2875 else
2876 bitoffset_end = double_int_zero;
2878 access_end = double_int_add (uhwi_to_double_int (offset),
2879 uhwi_to_double_int (size));
2881 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2882 [BITOFFSET, BITOFFSET_END)? */
2883 if (double_int_cmp (access_end, bitoffset, 0) > 0
2884 && (field_size == NULL_TREE
2885 || double_int_cmp (uhwi_to_double_int (offset),
2886 bitoffset_end, 0) < 0))
2888 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2889 bitoffset);
2890 /* We do have overlap. Now see if field is large enough to
2891 cover the access. Give up for accesses spanning multiple
2892 fields. */
2893 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2894 return NULL_TREE;
2895 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
2896 return NULL_TREE;
2897 return fold_ctor_reference (type, cval,
2898 double_int_to_uhwi (inner_offset), size);
2901 /* When memory is not explicitely mentioned in constructor, it is 0. */
2902 return build_zero_cst (type);
2905 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2906 to the memory at bit OFFSET. */
2908 static tree
2909 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2910 unsigned HOST_WIDE_INT size)
2912 tree ret;
2914 /* We found the field with exact match. */
2915 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2916 && !offset)
2917 return canonicalize_constructor_val (ctor);
2919 /* We are at the end of walk, see if we can view convert the
2920 result. */
2921 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2922 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2923 && operand_equal_p (TYPE_SIZE (type),
2924 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2926 ret = canonicalize_constructor_val (ctor);
2927 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2928 if (ret)
2929 STRIP_NOPS (ret);
2930 return ret;
2932 if (TREE_CODE (ctor) == STRING_CST)
2933 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2934 if (TREE_CODE (ctor) == CONSTRUCTOR)
2937 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2938 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2939 return fold_array_ctor_reference (type, ctor, offset, size);
2940 else
2941 return fold_nonarray_ctor_reference (type, ctor, offset, size);
2944 return NULL_TREE;
2947 /* Return the tree representing the element referenced by T if T is an
2948 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
2949 names using VALUEIZE. Return NULL_TREE otherwise. */
2951 tree
2952 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2954 tree ctor, idx, base;
2955 HOST_WIDE_INT offset, size, max_size;
2956 tree tem;
2958 if (TREE_THIS_VOLATILE (t))
2959 return NULL_TREE;
2961 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2962 return get_symbol_constant_value (t);
2964 tem = fold_read_from_constant_string (t);
2965 if (tem)
2966 return tem;
2968 switch (TREE_CODE (t))
2970 case ARRAY_REF:
2971 case ARRAY_RANGE_REF:
2972 /* Constant indexes are handled well by get_base_constructor.
2973 Only special case variable offsets.
2974 FIXME: This code can't handle nested references with variable indexes
2975 (they will be handled only by iteration of ccp). Perhaps we can bring
2976 get_ref_base_and_extent here and make it use a valueize callback. */
2977 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
2978 && valueize
2979 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
2980 && host_integerp (idx, 0))
2982 tree low_bound, unit_size;
2984 /* If the resulting bit-offset is constant, track it. */
2985 if ((low_bound = array_ref_low_bound (t),
2986 host_integerp (low_bound, 0))
2987 && (unit_size = array_ref_element_size (t),
2988 host_integerp (unit_size, 1)))
2990 offset = TREE_INT_CST_LOW (idx);
2991 offset -= TREE_INT_CST_LOW (low_bound);
2992 offset *= TREE_INT_CST_LOW (unit_size);
2993 offset *= BITS_PER_UNIT;
2995 base = TREE_OPERAND (t, 0);
2996 ctor = get_base_constructor (base, &offset, valueize);
2997 /* Empty constructor. Always fold to 0. */
2998 if (ctor == error_mark_node)
2999 return build_zero_cst (TREE_TYPE (t));
3000 /* Out of bound array access. Value is undefined,
3001 but don't fold. */
3002 if (offset < 0)
3003 return NULL_TREE;
3004 /* We can not determine ctor. */
3005 if (!ctor)
3006 return NULL_TREE;
3007 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3008 TREE_INT_CST_LOW (unit_size)
3009 * BITS_PER_UNIT);
3012 /* Fallthru. */
3014 case COMPONENT_REF:
3015 case BIT_FIELD_REF:
3016 case TARGET_MEM_REF:
3017 case MEM_REF:
3018 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3019 ctor = get_base_constructor (base, &offset, valueize);
3021 /* Empty constructor. Always fold to 0. */
3022 if (ctor == error_mark_node)
3023 return build_zero_cst (TREE_TYPE (t));
3024 /* We do not know precise address. */
3025 if (max_size == -1 || max_size != size)
3026 return NULL_TREE;
3027 /* We can not determine ctor. */
3028 if (!ctor)
3029 return NULL_TREE;
3031 /* Out of bound array access. Value is undefined, but don't fold. */
3032 if (offset < 0)
3033 return NULL_TREE;
3035 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3037 case REALPART_EXPR:
3038 case IMAGPART_EXPR:
3040 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3041 if (c && TREE_CODE (c) == COMPLEX_CST)
3042 return fold_build1_loc (EXPR_LOCATION (t),
3043 TREE_CODE (t), TREE_TYPE (t), c);
3044 break;
3047 default:
3048 break;
3051 return NULL_TREE;
3054 tree
3055 fold_const_aggregate_ref (tree t)
3057 return fold_const_aggregate_ref_1 (t, NULL);
3060 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3061 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3062 KNOWN_BINFO carries the binfo describing the true type of
3063 OBJ_TYPE_REF_OBJECT(REF). */
3065 tree
3066 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3068 unsigned HOST_WIDE_INT offset, size;
3069 tree v, fn;
3071 v = BINFO_VTABLE (known_binfo);
3072 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3073 if (!v)
3074 return NULL_TREE;
3076 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3078 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3079 v = TREE_OPERAND (v, 0);
3081 else
3082 offset = 0;
3084 if (TREE_CODE (v) != ADDR_EXPR)
3085 return NULL_TREE;
3086 v = TREE_OPERAND (v, 0);
3088 if (TREE_CODE (v) != VAR_DECL
3089 || !DECL_VIRTUAL_P (v)
3090 || !DECL_INITIAL (v)
3091 || DECL_INITIAL (v) == error_mark_node)
3092 return NULL_TREE;
3093 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3094 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3095 offset += token * size;
3096 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3097 offset, size);
3098 if (!fn)
3099 return NULL_TREE;
3100 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3101 || TREE_CODE (fn) == FDESC_EXPR);
3102 fn = TREE_OPERAND (fn, 0);
3103 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3105 /* When cgraph node is missing and function is not public, we cannot
3106 devirtualize. This can happen in WHOPR when the actual method
3107 ends up in other partition, because we found devirtualization
3108 possibility too late. */
3109 if (!can_refer_decl_in_current_unit_p (fn))
3110 return NULL_TREE;
3112 return fn;
3115 /* Return true iff VAL is a gimple expression that is known to be
3116 non-negative. Restricted to floating-point inputs. */
3118 bool
3119 gimple_val_nonnegative_real_p (tree val)
3121 gimple def_stmt;
3123 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3125 /* Use existing logic for non-gimple trees. */
3126 if (tree_expr_nonnegative_p (val))
3127 return true;
3129 if (TREE_CODE (val) != SSA_NAME)
3130 return false;
3132 /* Currently we look only at the immediately defining statement
3133 to make this determination, since recursion on defining
3134 statements of operands can lead to quadratic behavior in the
3135 worst case. This is expected to catch almost all occurrences
3136 in practice. It would be possible to implement limited-depth
3137 recursion if important cases are lost. Alternatively, passes
3138 that need this information (such as the pow/powi lowering code
3139 in the cse_sincos pass) could be revised to provide it through
3140 dataflow propagation. */
3142 def_stmt = SSA_NAME_DEF_STMT (val);
3144 if (is_gimple_assign (def_stmt))
3146 tree op0, op1;
3148 /* See fold-const.c:tree_expr_nonnegative_p for additional
3149 cases that could be handled with recursion. */
3151 switch (gimple_assign_rhs_code (def_stmt))
3153 case ABS_EXPR:
3154 /* Always true for floating-point operands. */
3155 return true;
3157 case MULT_EXPR:
3158 /* True if the two operands are identical (since we are
3159 restricted to floating-point inputs). */
3160 op0 = gimple_assign_rhs1 (def_stmt);
3161 op1 = gimple_assign_rhs2 (def_stmt);
3163 if (op0 == op1
3164 || operand_equal_p (op0, op1, 0))
3165 return true;
3167 default:
3168 return false;
3171 else if (is_gimple_call (def_stmt))
3173 tree fndecl = gimple_call_fndecl (def_stmt);
3174 if (fndecl
3175 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3177 tree arg1;
3179 switch (DECL_FUNCTION_CODE (fndecl))
3181 CASE_FLT_FN (BUILT_IN_ACOS):
3182 CASE_FLT_FN (BUILT_IN_ACOSH):
3183 CASE_FLT_FN (BUILT_IN_CABS):
3184 CASE_FLT_FN (BUILT_IN_COSH):
3185 CASE_FLT_FN (BUILT_IN_ERFC):
3186 CASE_FLT_FN (BUILT_IN_EXP):
3187 CASE_FLT_FN (BUILT_IN_EXP10):
3188 CASE_FLT_FN (BUILT_IN_EXP2):
3189 CASE_FLT_FN (BUILT_IN_FABS):
3190 CASE_FLT_FN (BUILT_IN_FDIM):
3191 CASE_FLT_FN (BUILT_IN_HYPOT):
3192 CASE_FLT_FN (BUILT_IN_POW10):
3193 return true;
3195 CASE_FLT_FN (BUILT_IN_SQRT):
3196 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3197 nonnegative inputs. */
3198 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3199 return true;
3201 break;
3203 CASE_FLT_FN (BUILT_IN_POWI):
3204 /* True if the second argument is an even integer. */
3205 arg1 = gimple_call_arg (def_stmt, 1);
3207 if (TREE_CODE (arg1) == INTEGER_CST
3208 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3209 return true;
3211 break;
3213 CASE_FLT_FN (BUILT_IN_POW):
3214 /* True if the second argument is an even integer-valued
3215 real. */
3216 arg1 = gimple_call_arg (def_stmt, 1);
3218 if (TREE_CODE (arg1) == REAL_CST)
3220 REAL_VALUE_TYPE c;
3221 HOST_WIDE_INT n;
3223 c = TREE_REAL_CST (arg1);
3224 n = real_to_integer (&c);
3226 if ((n & 1) == 0)
3228 REAL_VALUE_TYPE cint;
3229 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3230 if (real_identical (&c, &cint))
3231 return true;
3235 break;
3237 default:
3238 return false;
3243 return false;