/* gcc/gimple-fold.c -- extracted from official-gcc.git; web-viewer
   navigation header removed.  */
/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2013 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "dumpfile.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "gimple-fold.h"
34 /* Return true when DECL can be referenced from current unit.
35 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
36 We can get declarations that are not possible to reference for various
37 reasons:
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
44 set.
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsehwere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
49 declaring the body.
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final copmilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
54 directly. */
56 static bool
57 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
61 symtab_node snode;
63 /* We will later output the initializer, so we can refer to it.
64 So we are concerned only when DECL comes from initializer of
65 external var. */
66 if (!from_decl
67 || TREE_CODE (from_decl) != VAR_DECL
68 || !DECL_EXTERNAL (from_decl)
69 || (flag_ltrans
70 && symtab_get_node (from_decl)->symbol.in_other_partition))
71 return true;
72 /* We are concerned only about static/external vars and functions. */
73 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
74 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
75 return true;
76 /* We are folding reference from external vtable. The vtable may reffer
77 to a symbol keyed to other compilation unit. The other compilation
78 unit may be in separate DSO and the symbol may be hidden. */
79 if (DECL_VISIBILITY_SPECIFIED (decl)
80 && DECL_EXTERNAL (decl)
81 && (!(snode = symtab_get_node (decl)) || !snode->symbol.in_other_partition))
82 return false;
83 /* When function is public, we always can introduce new reference.
84 Exception are the COMDAT functions where introducing a direct
85 reference imply need to include function body in the curren tunit. */
86 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
87 return true;
88 /* We are not at ltrans stage; so don't worry about WHOPR.
89 Also when still gimplifying all referred comdat functions will be
90 produced.
92 As observed in PR20991 for already optimized out comdat virtual functions
93 it may be tempting to not necessarily give up because the copy will be
94 output elsewhere when corresponding vtable is output.
95 This is however not possible - ABI specify that COMDATs are output in
96 units where they are used and when the other unit was compiled with LTO
97 it is possible that vtable was kept public while the function itself
98 was privatized. */
99 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
100 return true;
102 /* OK we are seeing either COMDAT or static variable. In this case we must
103 check that the definition is still around so we can refer it. */
104 if (TREE_CODE (decl) == FUNCTION_DECL)
106 node = cgraph_get_node (decl);
107 /* Check that we still have function body and that we didn't took
108 the decision to eliminate offline copy of the function yet.
109 The second is important when devirtualization happens during final
110 compilation stage when making a new reference no longer makes callee
111 to be compiled. */
112 if (!node || !node->symbol.definition || node->global.inlined_to)
114 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
115 return false;
118 else if (TREE_CODE (decl) == VAR_DECL)
120 vnode = varpool_get_node (decl);
121 if (!vnode || !vnode->symbol.definition)
123 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
124 return false;
127 return true;
130 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
131 acceptable form for is_gimple_min_invariant.
132 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
134 tree
135 canonicalize_constructor_val (tree cval, tree from_decl)
137 tree orig_cval = cval;
138 STRIP_NOPS (cval);
139 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
140 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
142 tree ptr = TREE_OPERAND (cval, 0);
143 if (is_gimple_min_invariant (ptr))
144 cval = build1_loc (EXPR_LOCATION (cval),
145 ADDR_EXPR, TREE_TYPE (ptr),
146 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
147 ptr,
148 fold_convert (ptr_type_node,
149 TREE_OPERAND (cval, 1))));
151 if (TREE_CODE (cval) == ADDR_EXPR)
153 tree base = NULL_TREE;
154 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
156 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
157 if (base)
158 TREE_OPERAND (cval, 0) = base;
160 else
161 base = get_base_address (TREE_OPERAND (cval, 0));
162 if (!base)
163 return NULL_TREE;
165 if ((TREE_CODE (base) == VAR_DECL
166 || TREE_CODE (base) == FUNCTION_DECL)
167 && !can_refer_decl_in_current_unit_p (base, from_decl))
168 return NULL_TREE;
169 if (TREE_CODE (base) == VAR_DECL)
170 TREE_ADDRESSABLE (base) = 1;
171 else if (TREE_CODE (base) == FUNCTION_DECL)
173 /* Make sure we create a cgraph node for functions we'll reference.
174 They can be non-existent if the reference comes from an entry
175 of an external vtable for example. */
176 cgraph_get_create_real_symbol_node (base);
178 /* Fixup types in global initializers. */
179 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
180 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
182 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
183 cval = fold_convert (TREE_TYPE (orig_cval), cval);
184 return cval;
186 return orig_cval;
189 /* If SYM is a constant variable with known value, return the value.
190 NULL_TREE is returned otherwise. */
192 tree
193 get_symbol_constant_value (tree sym)
195 tree val = ctor_for_folding (sym);
196 if (val != error_mark_node)
198 if (val)
200 val = canonicalize_constructor_val (unshare_expr (val), sym);
201 if (val && is_gimple_min_invariant (val))
202 return val;
203 else
204 return NULL_TREE;
206 /* Variables declared 'const' without an initializer
207 have zero as the initializer if they may not be
208 overridden at link or run time. */
209 if (!val
210 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
211 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
212 return build_zero_cst (TREE_TYPE (sym));
215 return NULL_TREE;
220 /* Subroutine of fold_stmt. We perform several simplifications of the
221 memory reference tree EXPR and make sure to re-gimplify them properly
222 after propagation of constant addresses. IS_LHS is true if the
223 reference is supposed to be an lvalue. */
225 static tree
226 maybe_fold_reference (tree expr, bool is_lhs)
228 tree *t = &expr;
229 tree result;
231 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
232 || TREE_CODE (expr) == REALPART_EXPR
233 || TREE_CODE (expr) == IMAGPART_EXPR)
234 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
235 return fold_unary_loc (EXPR_LOCATION (expr),
236 TREE_CODE (expr),
237 TREE_TYPE (expr),
238 TREE_OPERAND (expr, 0));
239 else if (TREE_CODE (expr) == BIT_FIELD_REF
240 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
241 return fold_ternary_loc (EXPR_LOCATION (expr),
242 TREE_CODE (expr),
243 TREE_TYPE (expr),
244 TREE_OPERAND (expr, 0),
245 TREE_OPERAND (expr, 1),
246 TREE_OPERAND (expr, 2));
248 while (handled_component_p (*t))
249 t = &TREE_OPERAND (*t, 0);
251 /* Canonicalize MEM_REFs invariant address operand. Do this first
252 to avoid feeding non-canonical MEM_REFs elsewhere. */
253 if (TREE_CODE (*t) == MEM_REF
254 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
256 bool volatile_p = TREE_THIS_VOLATILE (*t);
257 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
258 TREE_OPERAND (*t, 0),
259 TREE_OPERAND (*t, 1));
260 if (tem)
262 TREE_THIS_VOLATILE (tem) = volatile_p;
263 *t = tem;
264 tem = maybe_fold_reference (expr, is_lhs);
265 if (tem)
266 return tem;
267 return expr;
271 if (!is_lhs
272 && (result = fold_const_aggregate_ref (expr))
273 && is_gimple_min_invariant (result))
274 return result;
276 /* Fold back MEM_REFs to reference trees. */
277 if (TREE_CODE (*t) == MEM_REF
278 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
279 && integer_zerop (TREE_OPERAND (*t, 1))
280 && (TREE_THIS_VOLATILE (*t)
281 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
282 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
283 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
284 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
285 /* We have to look out here to not drop a required conversion
286 from the rhs to the lhs if is_lhs, but we don't have the
287 rhs here to verify that. Thus require strict type
288 compatibility. */
289 && types_compatible_p (TREE_TYPE (*t),
290 TREE_TYPE (TREE_OPERAND
291 (TREE_OPERAND (*t, 0), 0))))
293 tree tem;
294 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
295 tem = maybe_fold_reference (expr, is_lhs);
296 if (tem)
297 return tem;
298 return expr;
300 else if (TREE_CODE (*t) == TARGET_MEM_REF)
302 tree tem = maybe_fold_tmr (*t);
303 if (tem)
305 *t = tem;
306 tem = maybe_fold_reference (expr, is_lhs);
307 if (tem)
308 return tem;
309 return expr;
313 return NULL_TREE;
317 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
318 replacement rhs for the statement or NULL_TREE if no simplification
319 could be made. It is assumed that the operands have been previously
320 folded. */
322 static tree
323 fold_gimple_assign (gimple_stmt_iterator *si)
325 gimple stmt = gsi_stmt (*si);
326 enum tree_code subcode = gimple_assign_rhs_code (stmt);
327 location_t loc = gimple_location (stmt);
329 tree result = NULL_TREE;
331 switch (get_gimple_rhs_class (subcode))
333 case GIMPLE_SINGLE_RHS:
335 tree rhs = gimple_assign_rhs1 (stmt);
337 if (REFERENCE_CLASS_P (rhs))
338 return maybe_fold_reference (rhs, false);
340 else if (TREE_CODE (rhs) == ADDR_EXPR)
342 tree ref = TREE_OPERAND (rhs, 0);
343 tree tem = maybe_fold_reference (ref, true);
344 if (tem
345 && TREE_CODE (tem) == MEM_REF
346 && integer_zerop (TREE_OPERAND (tem, 1)))
347 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
348 else if (tem)
349 result = fold_convert (TREE_TYPE (rhs),
350 build_fold_addr_expr_loc (loc, tem));
351 else if (TREE_CODE (ref) == MEM_REF
352 && integer_zerop (TREE_OPERAND (ref, 1)))
353 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
356 else if (TREE_CODE (rhs) == CONSTRUCTOR
357 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
358 && (CONSTRUCTOR_NELTS (rhs)
359 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
361 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
362 unsigned i;
363 tree val;
365 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
366 if (TREE_CODE (val) != INTEGER_CST
367 && TREE_CODE (val) != REAL_CST
368 && TREE_CODE (val) != FIXED_CST)
369 return NULL_TREE;
371 return build_vector_from_ctor (TREE_TYPE (rhs),
372 CONSTRUCTOR_ELTS (rhs));
375 else if (DECL_P (rhs))
376 return get_symbol_constant_value (rhs);
378 /* If we couldn't fold the RHS, hand over to the generic
379 fold routines. */
380 if (result == NULL_TREE)
381 result = fold (rhs);
383 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
384 that may have been added by fold, and "useless" type
385 conversions that might now be apparent due to propagation. */
386 STRIP_USELESS_TYPE_CONVERSION (result);
388 if (result != rhs && valid_gimple_rhs_p (result))
389 return result;
391 return NULL_TREE;
393 break;
395 case GIMPLE_UNARY_RHS:
397 tree rhs = gimple_assign_rhs1 (stmt);
399 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
400 if (result)
402 /* If the operation was a conversion do _not_ mark a
403 resulting constant with TREE_OVERFLOW if the original
404 constant was not. These conversions have implementation
405 defined behavior and retaining the TREE_OVERFLOW flag
406 here would confuse later passes such as VRP. */
407 if (CONVERT_EXPR_CODE_P (subcode)
408 && TREE_CODE (result) == INTEGER_CST
409 && TREE_CODE (rhs) == INTEGER_CST)
410 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
412 STRIP_USELESS_TYPE_CONVERSION (result);
413 if (valid_gimple_rhs_p (result))
414 return result;
417 break;
419 case GIMPLE_BINARY_RHS:
420 /* Try to canonicalize for boolean-typed X the comparisons
421 X == 0, X == 1, X != 0, and X != 1. */
422 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
423 || gimple_assign_rhs_code (stmt) == NE_EXPR)
425 tree lhs = gimple_assign_lhs (stmt);
426 tree op1 = gimple_assign_rhs1 (stmt);
427 tree op2 = gimple_assign_rhs2 (stmt);
428 tree type = TREE_TYPE (op1);
430 /* Check whether the comparison operands are of the same boolean
431 type as the result type is.
432 Check that second operand is an integer-constant with value
433 one or zero. */
434 if (TREE_CODE (op2) == INTEGER_CST
435 && (integer_zerop (op2) || integer_onep (op2))
436 && useless_type_conversion_p (TREE_TYPE (lhs), type))
438 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
439 bool is_logical_not = false;
441 /* X == 0 and X != 1 is a logical-not.of X
442 X == 1 and X != 0 is X */
443 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
444 || (cmp_code == NE_EXPR && integer_onep (op2)))
445 is_logical_not = true;
447 if (is_logical_not == false)
448 result = op1;
449 /* Only for one-bit precision typed X the transformation
450 !X -> ~X is valied. */
451 else if (TYPE_PRECISION (type) == 1)
452 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
453 type, op1);
454 /* Otherwise we use !X -> X ^ 1. */
455 else
456 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
457 type, op1, build_int_cst (type, 1));
462 if (!result)
463 result = fold_binary_loc (loc, subcode,
464 TREE_TYPE (gimple_assign_lhs (stmt)),
465 gimple_assign_rhs1 (stmt),
466 gimple_assign_rhs2 (stmt));
468 if (result)
470 STRIP_USELESS_TYPE_CONVERSION (result);
471 if (valid_gimple_rhs_p (result))
472 return result;
474 break;
476 case GIMPLE_TERNARY_RHS:
477 /* Try to fold a conditional expression. */
478 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
480 tree op0 = gimple_assign_rhs1 (stmt);
481 tree tem;
482 bool set = false;
483 location_t cond_loc = gimple_location (stmt);
485 if (COMPARISON_CLASS_P (op0))
487 fold_defer_overflow_warnings ();
488 tem = fold_binary_loc (cond_loc,
489 TREE_CODE (op0), TREE_TYPE (op0),
490 TREE_OPERAND (op0, 0),
491 TREE_OPERAND (op0, 1));
492 /* This is actually a conditional expression, not a GIMPLE
493 conditional statement, however, the valid_gimple_rhs_p
494 test still applies. */
495 set = (tem && is_gimple_condexpr (tem)
496 && valid_gimple_rhs_p (tem));
497 fold_undefer_overflow_warnings (set, stmt, 0);
499 else if (is_gimple_min_invariant (op0))
501 tem = op0;
502 set = true;
504 else
505 return NULL_TREE;
507 if (set)
508 result = fold_build3_loc (cond_loc, COND_EXPR,
509 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
510 gimple_assign_rhs2 (stmt),
511 gimple_assign_rhs3 (stmt));
514 if (!result)
515 result = fold_ternary_loc (loc, subcode,
516 TREE_TYPE (gimple_assign_lhs (stmt)),
517 gimple_assign_rhs1 (stmt),
518 gimple_assign_rhs2 (stmt),
519 gimple_assign_rhs3 (stmt));
521 if (result)
523 STRIP_USELESS_TYPE_CONVERSION (result);
524 if (valid_gimple_rhs_p (result))
525 return result;
527 break;
529 case GIMPLE_INVALID_RHS:
530 gcc_unreachable ();
533 return NULL_TREE;
536 /* Attempt to fold a conditional statement. Return true if any changes were
537 made. We only attempt to fold the condition expression, and do not perform
538 any transformation that would require alteration of the cfg. It is
539 assumed that the operands have been previously folded. */
541 static bool
542 fold_gimple_cond (gimple stmt)
544 tree result = fold_binary_loc (gimple_location (stmt),
545 gimple_cond_code (stmt),
546 boolean_type_node,
547 gimple_cond_lhs (stmt),
548 gimple_cond_rhs (stmt));
550 if (result)
552 STRIP_USELESS_TYPE_CONVERSION (result);
553 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
555 gimple_cond_set_condition_from_tree (stmt, result);
556 return true;
560 return false;
563 /* Convert EXPR into a GIMPLE value suitable for substitution on the
564 RHS of an assignment. Insert the necessary statements before
565 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
566 is replaced. If the call is expected to produces a result, then it
567 is replaced by an assignment of the new RHS to the result variable.
568 If the result is to be ignored, then the call is replaced by a
569 GIMPLE_NOP. A proper VDEF chain is retained by making the first
570 VUSE and the last VDEF of the whole sequence be the same as the replaced
571 statement and using new SSA names for stores in between. */
573 void
574 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
576 tree lhs;
577 gimple stmt, new_stmt;
578 gimple_stmt_iterator i;
579 gimple_seq stmts = NULL;
580 struct gimplify_ctx gctx;
581 gimple laststore;
582 tree reaching_vuse;
584 stmt = gsi_stmt (*si_p);
586 gcc_assert (is_gimple_call (stmt));
588 push_gimplify_context (&gctx);
589 gctx.into_ssa = gimple_in_ssa_p (cfun);
591 lhs = gimple_call_lhs (stmt);
592 if (lhs == NULL_TREE)
594 gimplify_and_add (expr, &stmts);
595 /* We can end up with folding a memcpy of an empty class assignment
596 which gets optimized away by C++ gimplification. */
597 if (gimple_seq_empty_p (stmts))
599 pop_gimplify_context (NULL);
600 if (gimple_in_ssa_p (cfun))
602 unlink_stmt_vdef (stmt);
603 release_defs (stmt);
605 gsi_replace (si_p, gimple_build_nop (), true);
606 return;
609 else
611 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
612 new_stmt = gimple_build_assign (lhs, tmp);
613 i = gsi_last (stmts);
614 gsi_insert_after_without_update (&i, new_stmt,
615 GSI_CONTINUE_LINKING);
618 pop_gimplify_context (NULL);
620 if (gimple_has_location (stmt))
621 annotate_all_with_location (stmts, gimple_location (stmt));
623 /* First iterate over the replacement statements backward, assigning
624 virtual operands to their defining statements. */
625 laststore = NULL;
626 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
628 new_stmt = gsi_stmt (i);
629 if ((gimple_assign_single_p (new_stmt)
630 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
631 || (is_gimple_call (new_stmt)
632 && (gimple_call_flags (new_stmt)
633 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
635 tree vdef;
636 if (!laststore)
637 vdef = gimple_vdef (stmt);
638 else
639 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
640 gimple_set_vdef (new_stmt, vdef);
641 if (vdef && TREE_CODE (vdef) == SSA_NAME)
642 SSA_NAME_DEF_STMT (vdef) = new_stmt;
643 laststore = new_stmt;
647 /* Second iterate over the statements forward, assigning virtual
648 operands to their uses. */
649 reaching_vuse = gimple_vuse (stmt);
650 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
652 new_stmt = gsi_stmt (i);
653 /* If the new statement possibly has a VUSE, update it with exact SSA
654 name we know will reach this one. */
655 if (gimple_has_mem_ops (new_stmt))
656 gimple_set_vuse (new_stmt, reaching_vuse);
657 gimple_set_modified (new_stmt, true);
658 if (gimple_vdef (new_stmt))
659 reaching_vuse = gimple_vdef (new_stmt);
662 /* If the new sequence does not do a store release the virtual
663 definition of the original statement. */
664 if (reaching_vuse
665 && reaching_vuse == gimple_vuse (stmt))
667 tree vdef = gimple_vdef (stmt);
668 if (vdef
669 && TREE_CODE (vdef) == SSA_NAME)
671 unlink_stmt_vdef (stmt);
672 release_ssa_name (vdef);
676 /* Finally replace the original statement with the sequence. */
677 gsi_replace_with_seq (si_p, stmts, false);
680 /* Return the string length, maximum string length or maximum value of
681 ARG in LENGTH.
682 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
683 is not NULL and, for TYPE == 0, its value is not equal to the length
684 we determine or if we are unable to determine the length or value,
685 return false. VISITED is a bitmap of visited variables.
686 TYPE is 0 if string length should be returned, 1 for maximum string
687 length and 2 for maximum value ARG can have. */
689 static bool
690 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
692 tree var, val;
693 gimple def_stmt;
695 if (TREE_CODE (arg) != SSA_NAME)
697 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
698 if (TREE_CODE (arg) == ADDR_EXPR
699 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
700 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
702 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
703 if (TREE_CODE (aop0) == INDIRECT_REF
704 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
705 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
706 length, visited, type);
709 if (type == 2)
711 val = arg;
712 if (TREE_CODE (val) != INTEGER_CST
713 || tree_int_cst_sgn (val) < 0)
714 return false;
716 else
717 val = c_strlen (arg, 1);
718 if (!val)
719 return false;
721 if (*length)
723 if (type > 0)
725 if (TREE_CODE (*length) != INTEGER_CST
726 || TREE_CODE (val) != INTEGER_CST)
727 return false;
729 if (tree_int_cst_lt (*length, val))
730 *length = val;
731 return true;
733 else if (simple_cst_equal (val, *length) != 1)
734 return false;
737 *length = val;
738 return true;
741 /* If ARG is registered for SSA update we cannot look at its defining
742 statement. */
743 if (name_registered_for_update_p (arg))
744 return false;
746 /* If we were already here, break the infinite cycle. */
747 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
748 return true;
750 var = arg;
751 def_stmt = SSA_NAME_DEF_STMT (var);
753 switch (gimple_code (def_stmt))
755 case GIMPLE_ASSIGN:
756 /* The RHS of the statement defining VAR must either have a
757 constant length or come from another SSA_NAME with a constant
758 length. */
759 if (gimple_assign_single_p (def_stmt)
760 || gimple_assign_unary_nop_p (def_stmt))
762 tree rhs = gimple_assign_rhs1 (def_stmt);
763 return get_maxval_strlen (rhs, length, visited, type);
765 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
767 tree op2 = gimple_assign_rhs2 (def_stmt);
768 tree op3 = gimple_assign_rhs3 (def_stmt);
769 return get_maxval_strlen (op2, length, visited, type)
770 && get_maxval_strlen (op3, length, visited, type);
772 return false;
774 case GIMPLE_PHI:
776 /* All the arguments of the PHI node must have the same constant
777 length. */
778 unsigned i;
780 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
782 tree arg = gimple_phi_arg (def_stmt, i)->def;
784 /* If this PHI has itself as an argument, we cannot
785 determine the string length of this argument. However,
786 if we can find a constant string length for the other
787 PHI args then we can still be sure that this is a
788 constant string length. So be optimistic and just
789 continue with the next argument. */
790 if (arg == gimple_phi_result (def_stmt))
791 continue;
793 if (!get_maxval_strlen (arg, length, visited, type))
794 return false;
797 return true;
799 default:
800 return false;
805 /* Fold builtin call in statement STMT. Returns a simplified tree.
806 We may return a non-constant expression, including another call
807 to a different function and with different arguments, e.g.,
808 substituting memcpy for strcpy when the string length is known.
809 Note that some builtins expand into inline code that may not
810 be valid in GIMPLE. Callers must take care. */
812 tree
813 gimple_fold_builtin (gimple stmt)
815 tree result, val[3];
816 tree callee, a;
817 int arg_idx, type;
818 bitmap visited;
819 bool ignore;
820 int nargs;
821 location_t loc = gimple_location (stmt);
823 gcc_assert (is_gimple_call (stmt));
825 ignore = (gimple_call_lhs (stmt) == NULL);
827 /* First try the generic builtin folder. If that succeeds, return the
828 result directly. */
829 result = fold_call_stmt (stmt, ignore);
830 if (result)
832 if (ignore)
833 STRIP_NOPS (result);
834 return result;
837 /* Ignore MD builtins. */
838 callee = gimple_call_fndecl (stmt);
839 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
840 return NULL_TREE;
842 /* Give up for always_inline inline builtins until they are
843 inlined. */
844 if (avoid_folding_inline_builtin (callee))
845 return NULL_TREE;
847 /* If the builtin could not be folded, and it has no argument list,
848 we're done. */
849 nargs = gimple_call_num_args (stmt);
850 if (nargs == 0)
851 return NULL_TREE;
853 /* Limit the work only for builtins we know how to simplify. */
854 switch (DECL_FUNCTION_CODE (callee))
856 case BUILT_IN_STRLEN:
857 case BUILT_IN_FPUTS:
858 case BUILT_IN_FPUTS_UNLOCKED:
859 arg_idx = 0;
860 type = 0;
861 break;
862 case BUILT_IN_STRCPY:
863 case BUILT_IN_STRNCPY:
864 arg_idx = 1;
865 type = 0;
866 break;
867 case BUILT_IN_MEMCPY_CHK:
868 case BUILT_IN_MEMPCPY_CHK:
869 case BUILT_IN_MEMMOVE_CHK:
870 case BUILT_IN_MEMSET_CHK:
871 case BUILT_IN_STRNCPY_CHK:
872 case BUILT_IN_STPNCPY_CHK:
873 arg_idx = 2;
874 type = 2;
875 break;
876 case BUILT_IN_STRCPY_CHK:
877 case BUILT_IN_STPCPY_CHK:
878 arg_idx = 1;
879 type = 1;
880 break;
881 case BUILT_IN_SNPRINTF_CHK:
882 case BUILT_IN_VSNPRINTF_CHK:
883 arg_idx = 1;
884 type = 2;
885 break;
886 default:
887 return NULL_TREE;
890 if (arg_idx >= nargs)
891 return NULL_TREE;
893 /* Try to use the dataflow information gathered by the CCP process. */
894 visited = BITMAP_ALLOC (NULL);
895 bitmap_clear (visited);
897 memset (val, 0, sizeof (val));
898 a = gimple_call_arg (stmt, arg_idx);
899 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
900 val[arg_idx] = NULL_TREE;
902 BITMAP_FREE (visited);
904 result = NULL_TREE;
905 switch (DECL_FUNCTION_CODE (callee))
907 case BUILT_IN_STRLEN:
908 if (val[0] && nargs == 1)
910 tree new_val =
911 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
913 /* If the result is not a valid gimple value, or not a cast
914 of a valid gimple value, then we cannot use the result. */
915 if (is_gimple_val (new_val)
916 || (CONVERT_EXPR_P (new_val)
917 && is_gimple_val (TREE_OPERAND (new_val, 0))))
918 return new_val;
920 break;
922 case BUILT_IN_STRCPY:
923 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
924 result = fold_builtin_strcpy (loc, callee,
925 gimple_call_arg (stmt, 0),
926 gimple_call_arg (stmt, 1),
927 val[1]);
928 break;
930 case BUILT_IN_STRNCPY:
931 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
932 result = fold_builtin_strncpy (loc, callee,
933 gimple_call_arg (stmt, 0),
934 gimple_call_arg (stmt, 1),
935 gimple_call_arg (stmt, 2),
936 val[1]);
937 break;
939 case BUILT_IN_FPUTS:
940 if (nargs == 2)
941 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
942 gimple_call_arg (stmt, 1),
943 ignore, false, val[0]);
944 break;
946 case BUILT_IN_FPUTS_UNLOCKED:
947 if (nargs == 2)
948 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
949 gimple_call_arg (stmt, 1),
950 ignore, true, val[0]);
951 break;
953 case BUILT_IN_MEMCPY_CHK:
954 case BUILT_IN_MEMPCPY_CHK:
955 case BUILT_IN_MEMMOVE_CHK:
956 case BUILT_IN_MEMSET_CHK:
957 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
958 result = fold_builtin_memory_chk (loc, callee,
959 gimple_call_arg (stmt, 0),
960 gimple_call_arg (stmt, 1),
961 gimple_call_arg (stmt, 2),
962 gimple_call_arg (stmt, 3),
963 val[2], ignore,
964 DECL_FUNCTION_CODE (callee));
965 break;
967 case BUILT_IN_STRCPY_CHK:
968 case BUILT_IN_STPCPY_CHK:
969 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
970 result = fold_builtin_stxcpy_chk (loc, callee,
971 gimple_call_arg (stmt, 0),
972 gimple_call_arg (stmt, 1),
973 gimple_call_arg (stmt, 2),
974 val[1], ignore,
975 DECL_FUNCTION_CODE (callee));
976 break;
978 case BUILT_IN_STRNCPY_CHK:
979 case BUILT_IN_STPNCPY_CHK:
980 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
981 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
982 gimple_call_arg (stmt, 1),
983 gimple_call_arg (stmt, 2),
984 gimple_call_arg (stmt, 3),
985 val[2], ignore,
986 DECL_FUNCTION_CODE (callee));
987 break;
989 case BUILT_IN_SNPRINTF_CHK:
990 case BUILT_IN_VSNPRINTF_CHK:
991 if (val[1] && is_gimple_val (val[1]))
992 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
993 DECL_FUNCTION_CODE (callee));
994 break;
996 default:
997 gcc_unreachable ();
1000 if (result && ignore)
1001 result = fold_ignored_result (result);
1002 return result;
1006 /* Return a binfo to be used for devirtualization of calls based on an object
1007 represented by a declaration (i.e. a global or automatically allocated one)
1008 or NULL if it cannot be found or is not safe. CST is expected to be an
1009 ADDR_EXPR of such object or the function will return NULL. Currently it is
1010 safe to use such binfo only if it has no base binfo (i.e. no ancestors)
1011 EXPECTED_TYPE is type of the class virtual belongs to. */
1013 tree
1014 gimple_extract_devirt_binfo_from_cst (tree cst, tree expected_type)
1016 HOST_WIDE_INT offset, size, max_size;
1017 tree base, type, binfo;
1018 bool last_artificial = false;
1020 if (!flag_devirtualize
1021 || TREE_CODE (cst) != ADDR_EXPR
1022 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1023 return NULL_TREE;
1025 cst = TREE_OPERAND (cst, 0);
1026 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1027 type = TREE_TYPE (base);
1028 if (!DECL_P (base)
1029 || max_size == -1
1030 || max_size != size
1031 || TREE_CODE (type) != RECORD_TYPE)
1032 return NULL_TREE;
1034 /* Find the sub-object the constant actually refers to and mark whether it is
1035 an artificial one (as opposed to a user-defined one). */
1036 while (true)
1038 HOST_WIDE_INT pos, size;
1039 tree fld;
1041 if (types_same_for_odr (type, expected_type))
1042 break;
1043 if (offset < 0)
1044 return NULL_TREE;
1046 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1048 if (TREE_CODE (fld) != FIELD_DECL)
1049 continue;
1051 pos = int_bit_position (fld);
1052 size = tree_low_cst (DECL_SIZE (fld), 1);
1053 if (pos <= offset && (pos + size) > offset)
1054 break;
1056 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1057 return NULL_TREE;
1059 last_artificial = DECL_ARTIFICIAL (fld);
1060 type = TREE_TYPE (fld);
1061 offset -= pos;
1063 /* Artificial sub-objects are ancestors, we do not want to use them for
1064 devirtualization, at least not here. */
1065 if (last_artificial)
1066 return NULL_TREE;
1067 binfo = TYPE_BINFO (type);
1068 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1069 return NULL_TREE;
1070 else
1071 return binfo;
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      /* If the OBJ_TYPE_REF already wraps a known function address,
	 strip the wrapper and call it directly.  */
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      /* Otherwise try to devirtualize through the static type of the
	 object the virtual call is performed on.  */
      else if (virtual_method_call_p (callee))
	{
	  tree obj = OBJ_TYPE_REF_OBJECT (callee);
	  tree binfo = gimple_extract_devirt_binfo_from_cst
		 (obj, obj_type_ref_class (callee));
	  if (binfo)
	    {
	      HOST_WIDE_INT token
		= TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
	      tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
	      if (fndecl)
		{
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		}
	    }
	}
    }

  /* The remaining folding may replace the statement entirely, which an
     in-place caller cannot tolerate; stop here in that case.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
	  /* update_call_from_tree fails for non-GIMPLE results; fall back
	     to re-gimplifying the replacement expression.  */
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
      else if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
	changed |= targetm.gimple_fold_builtin (gsi);
    }

  return changed;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  When INPLACE the statement may only be
   modified, never replaced.  Return true if any change was made.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may only shrink the statement, never grow its
	   operand count.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    /* Collect output constraints; they are needed to parse the
	       input constraints below.  */
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    /* An input that must live in memory is folded as an address.  */
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference under the ADDR_EXPR and re-wrap it,
		 preserving the original pointer type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* The statement object may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Not in-place: the worker is free to replace the statement.  */
  return fold_stmt_1 (gsi, false);
}
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  /* In-place folding must never have replaced the statement object.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
1328 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1329 if EXPR is null or we don't know how.
1330 If non-null, the result always has boolean type. */
1332 static tree
1333 canonicalize_bool (tree expr, bool invert)
1335 if (!expr)
1336 return NULL_TREE;
1337 else if (invert)
1339 if (integer_nonzerop (expr))
1340 return boolean_false_node;
1341 else if (integer_zerop (expr))
1342 return boolean_true_node;
1343 else if (TREE_CODE (expr) == SSA_NAME)
1344 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1345 build_int_cst (TREE_TYPE (expr), 0));
1346 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1347 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1348 boolean_type_node,
1349 TREE_OPERAND (expr, 0),
1350 TREE_OPERAND (expr, 1));
1351 else
1352 return NULL_TREE;
1354 else
1356 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1357 return expr;
1358 if (integer_nonzerop (expr))
1359 return boolean_true_node;
1360 else if (integer_zerop (expr))
1361 return boolean_false_node;
1362 else if (TREE_CODE (expr) == SSA_NAME)
1363 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1364 build_int_cst (TREE_TYPE (expr), 0));
1365 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1366 return fold_build2 (TREE_CODE (expr),
1367 boolean_type_node,
1368 TREE_OPERAND (expr, 0),
1369 TREE_OPERAND (expr, 1));
1370 else
1371 return NULL_TREE;
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* name == (name != 0) and name == !(name == 0) for booleans.  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == 1): compare against the definition
	     directly.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != 1): compare against the inverted
	     definition.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
1433 /* Check to see if two boolean expressions OP1 and OP2 are logically
1434 equivalent. */
1436 static bool
1437 same_bool_result_p (const_tree op1, const_tree op2)
1439 /* Simple cases first. */
1440 if (operand_equal_p (op1, op2, 0))
1441 return true;
1443 /* Check the cases where at least one of the operands is a comparison.
1444 These are a bit smarter than operand_equal_p in that they apply some
1445 identifies on SSA_NAMEs. */
1446 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1447 && same_bool_comparison_p (op1, TREE_CODE (op2),
1448 TREE_OPERAND (op2, 0),
1449 TREE_OPERAND (op2, 1)))
1450 return true;
1451 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1452 && same_bool_comparison_p (op2, TREE_CODE (op1),
1453 TREE_OPERAND (op1, 0),
1454 TREE_OPERAND (op1, 1)))
1455 return true;
1457 /* Default case. */
1458 return false;
1461 /* Forward declarations for some mutually recursive functions. */
1463 static tree
1464 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1465 enum tree_code code2, tree op2a, tree op2b);
1466 static tree
1467 and_var_with_comparison (tree var, bool invert,
1468 enum tree_code code2, tree op2a, tree op2b);
1469 static tree
1470 and_var_with_comparison_1 (gimple stmt,
1471 enum tree_code code2, tree op2a, tree op2b);
1472 static tree
1473 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1474 enum tree_code code2, tree op2a, tree op2b);
1475 static tree
1476 or_var_with_comparison (tree var, bool invert,
1477 enum tree_code code2, tree op2a, tree op2b);
1478 static tree
1479 or_var_with_comparison_1 (gimple stmt,
1480 enum tree_code code2, tree op2a, tree op2b);
1482 /* Helper function for and_comparisons_1: try to simplify the AND of the
1483 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1484 If INVERT is true, invert the value of the VAR before doing the AND.
1485 Return NULL_EXPR if we can't simplify this to a single expression. */
1487 static tree
1488 and_var_with_comparison (tree var, bool invert,
1489 enum tree_code code2, tree op2a, tree op2b)
1491 tree t;
1492 gimple stmt = SSA_NAME_DEF_STMT (var);
1494 /* We can only deal with variables whose definitions are assignments. */
1495 if (!is_gimple_assign (stmt))
1496 return NULL_TREE;
1498 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1499 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1500 Then we only have to consider the simpler non-inverted cases. */
1501 if (invert)
1502 t = or_var_with_comparison_1 (stmt,
1503 invert_tree_comparison (code2, false),
1504 op2a, op2b);
1505 else
1506 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1507 return canonicalize_bool (t, invert);
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) / (op2a == 1) tests op2a for truth ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... and (op2a == 0) / (op2a != 1) tests it for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);
      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }
	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }
	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}
      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* When the second comparison subsumes the inequality, the AND
	     collapses to the second comparison alone.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}
      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      /* A (possibly inverted) false argument makes the AND
		         false; any other constant leaves just the second
		         comparison.  */
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* Non-boolean PHI types fall through to the default.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
1927 /* Try to simplify the AND of two comparisons, specified by
1928 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1929 If this can be simplified to a single expression (without requiring
1930 introducing more SSA variables to hold intermediate values),
1931 return the resulting tree. Otherwise return NULL_TREE.
1932 If the result expression is non-null, it has boolean type. */
1934 tree
1935 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1936 enum tree_code code2, tree op2a, tree op2b)
1938 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1939 if (t)
1940 return t;
1941 else
1942 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1945 /* Helper function for or_comparisons_1: try to simplify the OR of the
1946 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1947 If INVERT is true, invert the value of VAR before doing the OR.
1948 Return NULL_EXPR if we can't simplify this to a single expression. */
1950 static tree
1951 or_var_with_comparison (tree var, bool invert,
1952 enum tree_code code2, tree op2a, tree op2b)
1954 tree t;
1955 gimple stmt = SSA_NAME_DEF_STMT (var);
1957 /* We can only deal with variables whose definitions are assignments. */
1958 if (!is_gimple_assign (stmt))
1959 return NULL_TREE;
1961 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1962 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1963 Then we only have to consider the simpler non-inverted cases. */
1964 if (invert)
1965 t = and_var_with_comparison_1 (stmt,
1966 invert_tree_comparison (code2, false),
1967 op2a, op2b);
1968 else
1969 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1970 return canonicalize_bool (t, invert);
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) / (op2a == 1) tests op2a for truth ...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ... and (op2a == 0) / (op2a != 1) tests it for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);
      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }
	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }
	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
2137 /* Try to simplify the OR of two comparisons defined by
2138 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2139 If this can be done without constructing an intermediate value,
2140 return the resulting tree; otherwise NULL_TREE is returned.
2141 This function is deliberately asymmetric as it recurses on SSA_DEFs
2142 in the first comparison but not the second. */
2144 static tree
2145 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2146 enum tree_code code2, tree op2a, tree op2b)
2148 tree truth_type = truth_type_for (TREE_TYPE (op1a));
2150 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2151 if (operand_equal_p (op1a, op2a, 0)
2152 && operand_equal_p (op1b, op2b, 0))
2154 /* Result will be either NULL_TREE, or a combined comparison. */
2155 tree t = combine_comparisons (UNKNOWN_LOCATION,
2156 TRUTH_ORIF_EXPR, code1, code2,
2157 truth_type, op1a, op1b);
2158 if (t)
2159 return t;
2162 /* Likewise the swapped case of the above. */
2163 if (operand_equal_p (op1a, op2b, 0)
2164 && operand_equal_p (op1b, op2a, 0))
2166 /* Result will be either NULL_TREE, or a combined comparison. */
2167 tree t = combine_comparisons (UNKNOWN_LOCATION,
2168 TRUTH_ORIF_EXPR, code1,
2169 swap_tree_comparison (code2),
2170 truth_type, op1a, op1b);
2171 if (t)
2172 return t;
2175 /* If both comparisons are of the same value against constants, we might
2176 be able to merge them. */
2177 if (operand_equal_p (op1a, op2a, 0)
2178 && TREE_CODE (op1b) == INTEGER_CST
2179 && TREE_CODE (op2b) == INTEGER_CST)
/* CMP is <0, 0, or >0 according to whether OP1B is less than, equal
   to, or greater than OP2B.  All case analysis below is phrased in
   terms of this ordering of the two constants.  */
2181 int cmp = tree_int_cst_compare (op1b, op2b);
2183 /* If we have (op1a != op1b), we should either be able to
2184 return that or TRUE, depending on whether the constant op1b
2185 also satisfies the other comparison against op2b. */
2186 if (code1 == NE_EXPR)
2188 bool done = true;
2189 bool val;
/* VAL records whether OP1B itself satisfies (x CODE2 OP2B); if so
   the union of the two ranges covers everything.  */
2190 switch (code2)
2192 case EQ_EXPR: val = (cmp == 0); break;
2193 case NE_EXPR: val = (cmp != 0); break;
2194 case LT_EXPR: val = (cmp < 0); break;
2195 case GT_EXPR: val = (cmp > 0); break;
2196 case LE_EXPR: val = (cmp <= 0); break;
2197 case GE_EXPR: val = (cmp >= 0); break;
2198 default: done = false;
2200 if (done)
2202 if (val)
2203 return boolean_true_node;
2204 else
2205 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2208 /* Likewise if the second comparison is a != comparison. */
2209 else if (code2 == NE_EXPR)
2211 bool done = true;
2212 bool val;
2213 switch (code1)
2215 case EQ_EXPR: val = (cmp == 0); break;
2216 case NE_EXPR: val = (cmp != 0); break;
2217 case LT_EXPR: val = (cmp > 0); break;
2218 case GT_EXPR: val = (cmp < 0); break;
2219 case LE_EXPR: val = (cmp >= 0); break;
2220 case GE_EXPR: val = (cmp <= 0); break;
2221 default: done = false;
2223 if (done)
2225 if (val)
2226 return boolean_true_node;
2227 else
2228 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2232 /* See if an equality test is redundant with the other comparison. */
2233 else if (code1 == EQ_EXPR)
2235 bool val;
2236 switch (code2)
2238 case EQ_EXPR: val = (cmp == 0); break;
2239 case NE_EXPR: val = (cmp != 0); break;
2240 case LT_EXPR: val = (cmp < 0); break;
2241 case GT_EXPR: val = (cmp > 0); break;
2242 case LE_EXPR: val = (cmp <= 0); break;
2243 case GE_EXPR: val = (cmp >= 0); break;
2244 default:
2245 val = false;
/* (x == OP1B) is subsumed by (x CODE2 OP2B) when OP1B lies inside
   the second comparison's range; keep only the second test then.  */
2247 if (val)
2248 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2250 else if (code2 == EQ_EXPR)
2252 bool val;
2253 switch (code1)
2255 case EQ_EXPR: val = (cmp == 0); break;
2256 case NE_EXPR: val = (cmp != 0); break;
2257 case LT_EXPR: val = (cmp > 0); break;
2258 case GT_EXPR: val = (cmp < 0); break;
2259 case LE_EXPR: val = (cmp >= 0); break;
2260 case GE_EXPR: val = (cmp <= 0); break;
2261 default:
2262 val = false;
2264 if (val)
2265 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2268 /* Choose the less restrictive of two < or <= comparisons. */
2269 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2270 && (code2 == LT_EXPR || code2 == LE_EXPR))
2272 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2273 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2274 else
2275 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2278 /* Likewise choose the less restrictive of two > or >= comparisons. */
2279 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2280 && (code2 == GT_EXPR || code2 == GE_EXPR))
2282 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2283 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2284 else
2285 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2288 /* Check for singleton ranges. */
2289 else if (cmp == 0
2290 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2291 || (code1 == GT_EXPR && code2 == LT_EXPR)))
/* (x < C) OR (x > C) is simply (x != C).  */
2292 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2294 /* Check for less/greater pairs that don't restrict the range at all. */
2295 else if (cmp >= 0
2296 && (code1 == LT_EXPR || code1 == LE_EXPR)
2297 && (code2 == GT_EXPR || code2 == GE_EXPR))
2298 return boolean_true_node;
2299 else if (cmp <= 0
2300 && (code1 == GT_EXPR || code1 == GE_EXPR)
2301 && (code2 == LT_EXPR || code2 == LE_EXPR))
2302 return boolean_true_node;
2305 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2306 NAME's definition is a truth value. See if there are any simplifications
2307 that can be done against the NAME's definition. */
2308 if (TREE_CODE (op1a) == SSA_NAME
2309 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2310 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is true when the first comparison tests that NAME is false.  */
2312 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2313 || (code1 == NE_EXPR && integer_onep (op1b)));
2314 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2315 switch (gimple_code (stmt))
2317 case GIMPLE_ASSIGN:
2318 /* Try to simplify by copy-propagating the definition. */
2319 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2321 case GIMPLE_PHI:
2322 /* If every argument to the PHI produces the same result when
2323 ORed with the second comparison, we win.
2324 Do not do this unless the type is bool since we need a bool
2325 result here anyway. */
2326 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2328 tree result = NULL_TREE;
2329 unsigned i;
2330 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2332 tree arg = gimple_phi_arg_def (stmt, i);
2334 /* If this PHI has itself as an argument, ignore it.
2335 If all the other args produce the same result,
2336 we're still OK. */
2337 if (arg == gimple_phi_result (stmt))
2338 continue;
2339 else if (TREE_CODE (arg) == INTEGER_CST)
/* A constant argument that is true (after inversion) forces the OR
   to true; a false one leaves just the second comparison.  */
2341 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2343 if (!result)
2344 result = boolean_true_node;
2345 else if (!integer_onep (result))
2346 return NULL_TREE;
2348 else if (!result)
2349 result = fold_build2 (code2, boolean_type_node,
2350 op2a, op2b);
2351 else if (!same_bool_comparison_p (result,
2352 code2, op2a, op2b))
2353 return NULL_TREE;
2355 else if (TREE_CODE (arg) == SSA_NAME
2356 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2358 tree temp;
2359 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2360 /* In simple cases we can look through PHI nodes,
2361 but we have to be careful with loops.
2362 See PR49073. */
2363 if (! dom_info_available_p (CDI_DOMINATORS)
2364 || gimple_bb (def_stmt) == gimple_bb (stmt)
2365 || dominated_by_p (CDI_DOMINATORS,
2366 gimple_bb (def_stmt),
2367 gimple_bb (stmt)))
2368 return NULL_TREE;
2369 temp = or_var_with_comparison (arg, invert, code2,
2370 op2a, op2b);
2371 if (!temp)
2372 return NULL_TREE;
2373 else if (!result)
2374 result = temp;
2375 else if (!same_bool_result_p (result, temp))
2376 return NULL_TREE;
2378 else
2379 return NULL_TREE;
2381 return result;
2384 default:
2385 break;
2388 return NULL_TREE;
2391 /* Try to simplify the OR of two comparisons, specified by
2392 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2393 If this can be simplified to a single expression (without requiring
2394 introducing more SSA variables to hold intermediate values),
2395 return the resulting tree. Otherwise return NULL_TREE.
2396 If the result expression is non-null, it has boolean type. */
2398 tree
2399 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2400 enum tree_code code2, tree op2a, tree op2b)
2402 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2403 if (t)
2404 return t;
2405 else
2406 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2410 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2412 Either NULL_TREE, a simplified but non-constant or a constant
2413 is returned.
2415 ??? This should go into a gimple-fold-inline.h file to be eventually
2416 privatized with the single valueize function used in the various TUs
2417 to avoid the indirect function call overhead. */
2419 tree
2420 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2422 location_t loc = gimple_location (stmt);
2423 switch (gimple_code (stmt))
2425 case GIMPLE_ASSIGN:
2427 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2429 switch (get_gimple_rhs_class (subcode))
2431 case GIMPLE_SINGLE_RHS:
2433 tree rhs = gimple_assign_rhs1 (stmt);
2434 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2436 if (TREE_CODE (rhs) == SSA_NAME)
2438 /* If the RHS is an SSA_NAME, return its known constant value,
2439 if any. */
2440 return (*valueize) (rhs);
2442 /* Handle propagating invariant addresses into address
2443 operations. */
2444 else if (TREE_CODE (rhs) == ADDR_EXPR
2445 && !is_gimple_min_invariant (rhs))
2447 HOST_WIDE_INT offset = 0;
2448 tree base;
2449 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2450 &offset,
2451 valueize);
2452 if (base
2453 && (CONSTANT_CLASS_P (base)
2454 || decl_address_invariant_p (base)))
2455 return build_invariant_address (TREE_TYPE (rhs),
2456 base, offset);
/* A fully-populated vector CONSTRUCTOR becomes a VECTOR_CST when
   every valueized element is a constant.  */
2458 else if (TREE_CODE (rhs) == CONSTRUCTOR
2459 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2460 && (CONSTRUCTOR_NELTS (rhs)
2461 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2463 unsigned i;
2464 tree val, *vec;
2466 vec = XALLOCAVEC (tree,
2467 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2468 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2470 val = (*valueize) (val);
2471 if (TREE_CODE (val) == INTEGER_CST
2472 || TREE_CODE (val) == REAL_CST
2473 || TREE_CODE (val) == FIXED_CST)
2474 vec[i] = val;
2475 else
2476 return NULL_TREE;
2479 return build_vector (TREE_TYPE (rhs), vec);
2482 if (kind == tcc_reference)
2484 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2485 || TREE_CODE (rhs) == REALPART_EXPR
2486 || TREE_CODE (rhs) == IMAGPART_EXPR)
2487 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2489 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2490 return fold_unary_loc (EXPR_LOCATION (rhs),
2491 TREE_CODE (rhs),
2492 TREE_TYPE (rhs), val);
2494 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2495 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2497 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2498 return fold_ternary_loc (EXPR_LOCATION (rhs),
2499 TREE_CODE (rhs),
2500 TREE_TYPE (rhs), val,
2501 TREE_OPERAND (rhs, 1),
2502 TREE_OPERAND (rhs, 2));
2504 else if (TREE_CODE (rhs) == MEM_REF
2505 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2507 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2508 if (TREE_CODE (val) == ADDR_EXPR
2509 && is_gimple_min_invariant (val))
/* Rebase the MEM_REF on the invariant address so the
   constant-aggregate lookup below can see through it.  */
2511 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2512 unshare_expr (val),
2513 TREE_OPERAND (rhs, 1));
2514 if (tem)
2515 rhs = tem;
2518 return fold_const_aggregate_ref_1 (rhs, valueize);
2520 else if (kind == tcc_declaration)
2521 return get_symbol_constant_value (rhs);
2522 return rhs;
2525 case GIMPLE_UNARY_RHS:
2527 /* Handle unary operators that can appear in GIMPLE form.
2528 Note that we know the single operand must be a constant,
2529 so this should almost always return a simplified RHS. */
2530 tree lhs = gimple_assign_lhs (stmt);
2531 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2533 /* Conversions are useless for CCP purposes if they are
2534 value-preserving. Thus the restrictions that
2535 useless_type_conversion_p places for restrict qualification
2536 of pointer types should not apply here.
2537 Substitution later will only substitute to allowed places. */
2538 if (CONVERT_EXPR_CODE_P (subcode)
2539 && POINTER_TYPE_P (TREE_TYPE (lhs))
2540 && POINTER_TYPE_P (TREE_TYPE (op0))
2541 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2542 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2543 && TYPE_MODE (TREE_TYPE (lhs))
2544 == TYPE_MODE (TREE_TYPE (op0)))
2545 return op0;
2547 return
2548 fold_unary_ignore_overflow_loc (loc, subcode,
2549 gimple_expr_type (stmt), op0);
2552 case GIMPLE_BINARY_RHS:
2554 /* Handle binary operators that can appear in GIMPLE form. */
2555 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2556 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2558 /* Translate &x + CST into an invariant form suitable for
2559 further propagation. */
2560 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2561 && TREE_CODE (op0) == ADDR_EXPR
2562 && TREE_CODE (op1) == INTEGER_CST)
2564 tree off = fold_convert (ptr_type_node, op1);
2565 return build_fold_addr_expr_loc
2566 (loc,
2567 fold_build2 (MEM_REF,
2568 TREE_TYPE (TREE_TYPE (op0)),
2569 unshare_expr (op0), off));
2572 return fold_binary_loc (loc, subcode,
2573 gimple_expr_type (stmt), op0, op1);
2576 case GIMPLE_TERNARY_RHS:
2578 /* Handle ternary operators that can appear in GIMPLE form. */
2579 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2580 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2581 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2583 /* Fold embedded expressions in ternary codes. */
2584 if ((subcode == COND_EXPR
2585 || subcode == VEC_COND_EXPR)
2586 && COMPARISON_CLASS_P (op0))
2588 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2589 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2590 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2591 TREE_TYPE (op0), op00, op01);
2592 if (tem)
2593 op0 = tem;
2596 return fold_ternary_loc (loc, subcode,
2597 gimple_expr_type (stmt), op0, op1, op2);
2600 default:
2601 gcc_unreachable ();
2605 case GIMPLE_CALL:
2607 tree fn;
2609 if (gimple_call_internal_p (stmt))
2610 /* No folding yet for these functions. */
2611 return NULL_TREE;
2613 fn = (*valueize) (gimple_call_fn (stmt));
/* Only calls to built-in functions are folded here, by building a
   CALL_EXPR over the valueized arguments and using fold_call_expr.  */
2614 if (TREE_CODE (fn) == ADDR_EXPR
2615 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2616 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2618 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2619 tree call, retval;
2620 unsigned i;
2621 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2622 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2623 call = build_call_array_loc (loc,
2624 gimple_call_return_type (stmt),
2625 fn, gimple_call_num_args (stmt), args);
2626 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2627 if (retval)
2628 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2629 STRIP_NOPS (retval);
2630 return retval;
2632 return NULL_TREE;
2635 default:
2636 return NULL_TREE;
2640 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2641 Returns NULL_TREE if folding to a constant is not possible, otherwise
2642 returns a constant according to is_gimple_min_invariant. */
2644 tree
2645 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2647 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2648 if (res && is_gimple_min_invariant (res))
2649 return res;
2650 return NULL_TREE;
2654 /* The following set of functions are supposed to fold references using
2655 their constant initializers. */
2657 static tree fold_ctor_reference (tree type, tree ctor,
2658 unsigned HOST_WIDE_INT offset,
2659 unsigned HOST_WIDE_INT size, tree);
2661 /* See if we can find constructor defining value of BASE.
2662 When we know the consructor with constant offset (such as
2663 base is array[40] and we do know constructor of array), then
2664 BIT_OFFSET is adjusted accordingly.
2666 As a special case, return error_mark_node when constructor
2667 is not explicitly available, but it is known to be zero
2668 such as 'static const int a;'. */
2669 static tree
2670 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2671 tree (*valueize)(tree))
2673 HOST_WIDE_INT bit_offset2, size, max_size;
2674 if (TREE_CODE (base) == MEM_REF)
2676 if (!integer_zerop (TREE_OPERAND (base, 1)))
2678 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2679 return NULL_TREE;
2680 *bit_offset += (mem_ref_offset (base).low
2681 * BITS_PER_UNIT);
2684 if (valueize
2685 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2686 base = valueize (TREE_OPERAND (base, 0));
2687 if (!base || TREE_CODE (base) != ADDR_EXPR)
2688 return NULL_TREE;
2689 base = TREE_OPERAND (base, 0);
2692 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2693 DECL_INITIAL. If BASE is a nested reference into another
2694 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2695 the inner reference. */
2696 switch (TREE_CODE (base))
2698 case VAR_DECL:
2699 case CONST_DECL:
2701 tree init = ctor_for_folding (base);
2703 /* Our semantic is exact opposite of ctor_for_folding;
2704 NULL means unknown, while error_mark_node is 0. */
2705 if (init == error_mark_node)
2706 return NULL_TREE;
2707 if (!init)
2708 return error_mark_node;
2709 return init;
2712 case ARRAY_REF:
2713 case COMPONENT_REF:
2714 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2715 if (max_size == -1 || size != max_size)
2716 return NULL_TREE;
2717 *bit_offset += bit_offset2;
2718 return get_base_constructor (base, bit_offset, valueize);
2720 case STRING_CST:
2721 case CONSTRUCTOR:
2722 return base;
2724 default:
2725 return NULL_TREE;
2729 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2730 to the memory at bit OFFSET.
2732 We do only simple job of folding byte accesses. */
2734 static tree
2735 fold_string_cst_ctor_reference (tree type, tree ctor,
2736 unsigned HOST_WIDE_INT offset,
2737 unsigned HOST_WIDE_INT size)
2739 if (INTEGRAL_TYPE_P (type)
2740 && (TYPE_MODE (type)
2741 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2742 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2743 == MODE_INT)
2744 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2745 && size == BITS_PER_UNIT
2746 && !(offset % BITS_PER_UNIT))
2748 offset /= BITS_PER_UNIT;
2749 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2750 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2751 [offset]));
2752 /* Folding
2753 const char a[20]="hello";
2754 return a[10];
2756 might lead to offset greater than string length. In this case we
2757 know value is either initialized to 0 or out of bounds. Return 0
2758 in both cases. */
2759 return build_zero_cst (type);
2761 return NULL_TREE;
2764 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2765 SIZE to the memory at bit OFFSET. */
2767 static tree
2768 fold_array_ctor_reference (tree type, tree ctor,
2769 unsigned HOST_WIDE_INT offset,
2770 unsigned HOST_WIDE_INT size,
2771 tree from_decl)
2773 unsigned HOST_WIDE_INT cnt;
2774 tree cfield, cval;
2775 double_int low_bound, elt_size;
2776 double_int index, max_index;
2777 double_int access_index;
2778 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2779 HOST_WIDE_INT inner_offset;
2781 /* Compute low bound and elt size. */
2782 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2783 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2784 if (domain_type && TYPE_MIN_VALUE (domain_type))
2786 /* Static constructors for variably sized objects makes no sense. */
2787 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST)
2788 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2789 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2791 else
2792 low_bound = double_int_zero;
2793 /* Static constructors for variably sized objects makes no sense. */
2794 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2795 == INTEGER_CST);
2796 elt_size =
2797 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2800 /* We can handle only constantly sized accesses that are known to not
2801 be larger than size of array element. */
2802 if (!TYPE_SIZE_UNIT (type)
2803 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2804 || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
2805 return NULL_TREE;
2807 /* Compute the array index we look for. */
2808 access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
2809 .udiv (elt_size, TRUNC_DIV_EXPR);
2810 access_index += low_bound;
2811 if (index_type)
2812 access_index = access_index.ext (TYPE_PRECISION (index_type),
2813 TYPE_UNSIGNED (index_type));
2815 /* And offset within the access. */
2816 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2818 /* See if the array field is large enough to span whole access. We do not
2819 care to fold accesses spanning multiple array indexes. */
2820 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
2821 return NULL_TREE;
/* Walk the constructor elements tracking a running index.  INDEX starts
   one below LOW_BOUND so an element with no explicit index gets
   LOW_BOUND on the first iteration.  */
2823 index = low_bound - double_int_one;
2824 if (index_type)
2825 index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));
2827 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2829 /* Array constructor might explicitly set index, or specify range
2830 or leave index NULL meaning that it is next index after previous
2831 one. */
2832 if (cfield)
2834 if (TREE_CODE (cfield) == INTEGER_CST)
2835 max_index = index = tree_to_double_int (cfield);
2836 else
2838 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2839 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2840 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2843 else
2845 index += double_int_one;
2846 if (index_type)
2847 index = index.ext (TYPE_PRECISION (index_type),
2848 TYPE_UNSIGNED (index_type));
2849 max_index = index;
2852 /* Do we have match? */
/* Fold into this element when ACCESS_INDEX lies in [INDEX, MAX_INDEX]
   (the second cmp argument presumably selects unsigned comparison --
   TODO(review): confirm against the double_int::cmp documentation).  */
2853 if (access_index.cmp (index, 1) >= 0
2854 && access_index.cmp (max_index, 1) <= 0)
2855 return fold_ctor_reference (type, cval, inner_offset, size,
2856 from_decl);
2858 /* When memory is not explicitly mentioned in constructor,
2859 it is 0 (or out of range). */
2860 return build_zero_cst (type);
2863 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2864 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2866 static tree
2867 fold_nonarray_ctor_reference (tree type, tree ctor,
2868 unsigned HOST_WIDE_INT offset,
2869 unsigned HOST_WIDE_INT size,
2870 tree from_decl)
2872 unsigned HOST_WIDE_INT cnt;
2873 tree cfield, cval;
2875 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2876 cval)
2878 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2879 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2880 tree field_size = DECL_SIZE (cfield);
2881 double_int bitoffset;
2882 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2883 double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
2884 double_int bitoffset_end, access_end;
2886 /* Variable sized objects in static constructors makes no sense,
2887 but field_size can be NULL for flexible array members. */
2888 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2889 && TREE_CODE (byte_offset) == INTEGER_CST
2890 && (field_size != NULL_TREE
2891 ? TREE_CODE (field_size) == INTEGER_CST
2892 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2894 /* Compute bit offset of the field. */
2895 bitoffset = tree_to_double_int (field_offset)
2896 + byte_offset_cst * bits_per_unit_cst;
2897 /* Compute bit offset where the field ends. */
2898 if (field_size != NULL_TREE)
2899 bitoffset_end = bitoffset + tree_to_double_int (field_size);
2900 else
/* Flexible array member with unknown size; with a zero end, the
   end-of-field check below then refuses to fold such accesses.  */
2901 bitoffset_end = double_int_zero;
2903 access_end = double_int::from_uhwi (offset)
2904 + double_int::from_uhwi (size);
2906 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2907 [BITOFFSET, BITOFFSET_END)? */
2908 if (access_end.cmp (bitoffset, 0) > 0
2909 && (field_size == NULL_TREE
2910 || double_int::from_uhwi (offset).slt (bitoffset_end)))
2912 double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
2913 /* We do have overlap. Now see if field is large enough to
2914 cover the access. Give up for accesses spanning multiple
2915 fields. */
2916 if (access_end.cmp (bitoffset_end, 0) > 0)
2917 return NULL_TREE;
2918 if (double_int::from_uhwi (offset).slt (bitoffset))
2919 return NULL_TREE;
2920 return fold_ctor_reference (type, cval,
2921 inner_offset.to_uhwi (), size,
2922 from_decl);
2925 /* When memory is not explicitly mentioned in constructor, it is 0. */
2926 return build_zero_cst (type);
2929 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2930 to the memory at bit OFFSET. */
2932 static tree
2933 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2934 unsigned HOST_WIDE_INT size, tree from_decl)
2936 tree ret;
2938 /* We found the field with exact match. */
2939 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2940 && !offset)
2941 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
2943 /* We are at the end of walk, see if we can view convert the
2944 result. */
2945 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2946 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2947 && operand_equal_p (TYPE_SIZE (type),
2948 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2950 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
2951 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2952 if (ret)
2953 STRIP_NOPS (ret);
2954 return ret;
2956 if (TREE_CODE (ctor) == STRING_CST)
2957 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2958 if (TREE_CODE (ctor) == CONSTRUCTOR)
2961 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2962 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2963 return fold_array_ctor_reference (type, ctor, offset, size,
2964 from_decl);
2965 else
2966 return fold_nonarray_ctor_reference (type, ctor, offset, size,
2967 from_decl);
2970 return NULL_TREE;
2973 /* Return the tree representing the element referenced by T if T is an
2974 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
2975 names using VALUEIZE. Return NULL_TREE otherwise. */
2977 tree
2978 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2980 tree ctor, idx, base;
2981 HOST_WIDE_INT offset, size, max_size;
2982 tree tem;
2984 if (TREE_THIS_VOLATILE (t))
2985 return NULL_TREE;
2987 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2988 return get_symbol_constant_value (t);
2990 tem = fold_read_from_constant_string (t);
2991 if (tem)
2992 return tem;
2994 switch (TREE_CODE (t))
2996 case ARRAY_REF:
2997 case ARRAY_RANGE_REF:
2998 /* Constant indexes are handled well by get_base_constructor.
2999 Only special case variable offsets.
3000 FIXME: This code can't handle nested references with variable indexes
3001 (they will be handled only by iteration of ccp). Perhaps we can bring
3002 get_ref_base_and_extent here and make it use a valueize callback. */
3003 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3004 && valueize
3005 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3006 && TREE_CODE (idx) == INTEGER_CST)
3008 tree low_bound, unit_size;
3009 double_int doffset;
3011 /* If the resulting bit-offset is constant, track it. */
/* Comma expressions: each condition binds a value, then tests it.  */
3012 if ((low_bound = array_ref_low_bound (t),
3013 TREE_CODE (low_bound) == INTEGER_CST)
3014 && (unit_size = array_ref_element_size (t),
3015 host_integerp (unit_size, 1))
3016 && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
3017 .sext (TYPE_PRECISION (TREE_TYPE (idx))),
3018 doffset.fits_shwi ()))
3020 offset = doffset.to_shwi ();
3021 offset *= TREE_INT_CST_LOW (unit_size);
3022 offset *= BITS_PER_UNIT;
3024 base = TREE_OPERAND (t, 0);
3025 ctor = get_base_constructor (base, &offset, valueize);
3026 /* Empty constructor. Always fold to 0. */
3027 if (ctor == error_mark_node)
3028 return build_zero_cst (TREE_TYPE (t));
3029 /* Out of bound array access. Value is undefined,
3030 but don't fold. */
3031 if (offset < 0)
3032 return NULL_TREE;
3033 /* We can not determine ctor. */
3034 if (!ctor)
3035 return NULL_TREE;
3036 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3037 TREE_INT_CST_LOW (unit_size)
3038 * BITS_PER_UNIT,
3039 base);
/* Variable index could not be made constant: fall through and let
   get_ref_base_and_extent handle the reference.  */
3042 /* Fallthru. */
3044 case COMPONENT_REF:
3045 case BIT_FIELD_REF:
3046 case TARGET_MEM_REF:
3047 case MEM_REF:
3048 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3049 ctor = get_base_constructor (base, &offset, valueize);
3051 /* Empty constructor. Always fold to 0. */
3052 if (ctor == error_mark_node)
3053 return build_zero_cst (TREE_TYPE (t));
3054 /* We do not know precise address. */
3055 if (max_size == -1 || max_size != size)
3056 return NULL_TREE;
3057 /* We can not determine ctor. */
3058 if (!ctor)
3059 return NULL_TREE;
3061 /* Out of bound array access. Value is undefined, but don't fold. */
3062 if (offset < 0)
3063 return NULL_TREE;
3065 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
3066 base);
3068 case REALPART_EXPR:
3069 case IMAGPART_EXPR:
3071 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3072 if (c && TREE_CODE (c) == COMPLEX_CST)
3073 return fold_build1_loc (EXPR_LOCATION (t),
3074 TREE_CODE (t), TREE_TYPE (t), c);
3075 break;
3078 default:
3079 break;
3082 return NULL_TREE;
3085 tree
3086 fold_const_aggregate_ref (tree t)
3088 return fold_const_aggregate_ref_1 (t, NULL);
3091 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3092 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3093 KNOWN_BINFO carries the binfo describing the true type of
3094 OBJ_TYPE_REF_OBJECT(REF). */
3096 tree
3097 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3099 unsigned HOST_WIDE_INT offset, size;
3100 tree v, fn, vtable, init;
3102 vtable = v = BINFO_VTABLE (known_binfo);
3103 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3104 if (!v)
3105 return NULL_TREE;
/* BINFO_VTABLE may be an address plus a constant byte offset; fold
   that offset into OFFSET (in bits) and strip down to the VAR_DECL.  */
3107 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3109 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3110 v = TREE_OPERAND (v, 0);
3112 else
3113 offset = 0;
3115 if (TREE_CODE (v) != ADDR_EXPR)
3116 return NULL_TREE;
3117 v = TREE_OPERAND (v, 0);
3119 if (TREE_CODE (v) != VAR_DECL
3120 || !DECL_VIRTUAL_P (v))
3121 return NULL_TREE;
3122 init = ctor_for_folding (v);
3124 /* The virtual tables should always be born with constructors.
3125 and we always should assume that they are available for
3126 folding. At the moment we do not stream them in all cases,
3127 but it should never happen that ctor seem unreachable. */
3128 gcc_assert (init);
3129 if (init == error_mark_node)
3131 gcc_assert (in_lto_p);
3132 return NULL_TREE;
3134 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3135 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
/* Each vtable entry occupies SIZE bits; slot TOKEN lives TOKEN * SIZE
   bits past the base offset.  */
3136 offset += token * size;
3137 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
3138 offset, size, v);
/* A zero entry means an unreachable or unset slot; do not fold.  */
3139 if (!fn || integer_zerop (fn))
3140 return NULL_TREE;
3141 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3142 || TREE_CODE (fn) == FDESC_EXPR);
3143 fn = TREE_OPERAND (fn, 0);
3144 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3146 /* When cgraph node is missing and function is not public, we cannot
3147 devirtualize. This can happen in WHOPR when the actual method
3148 ends up in other partition, because we found devirtualization
3149 possibility too late. */
3150 if (!can_refer_decl_in_current_unit_p (fn, vtable))
3151 return NULL_TREE;
3153 /* Make sure we create a cgraph node for functions we'll reference.
3154 They can be non-existent if the reference comes from an entry
3155 of an external vtable for example. */
3156 cgraph_get_create_node (fn);
3158 return fn;
3161 /* Return true iff VAL is a gimple expression that is known to be
3162 non-negative. Restricted to floating-point inputs. */
3164 bool
3165 gimple_val_nonnegative_real_p (tree val)
3167 gimple def_stmt;
3169 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3171 /* Use existing logic for non-gimple trees. */
3172 if (tree_expr_nonnegative_p (val))
3173 return true;
3175 if (TREE_CODE (val) != SSA_NAME)
3176 return false;
3178 /* Currently we look only at the immediately defining statement
3179 to make this determination, since recursion on defining
3180 statements of operands can lead to quadratic behavior in the
3181 worst case. This is expected to catch almost all occurrences
3182 in practice. It would be possible to implement limited-depth
3183 recursion if important cases are lost. Alternatively, passes
3184 that need this information (such as the pow/powi lowering code
3185 in the cse_sincos pass) could be revised to provide it through
3186 dataflow propagation. */
3188 def_stmt = SSA_NAME_DEF_STMT (val);
3190 if (is_gimple_assign (def_stmt))
3192 tree op0, op1;
3194 /* See fold-const.c:tree_expr_nonnegative_p for additional
3195 cases that could be handled with recursion. */
3197 switch (gimple_assign_rhs_code (def_stmt))
3199 case ABS_EXPR:
3200 /* Always true for floating-point operands. */
3201 return true;
3203 case MULT_EXPR:
3204 /* True if the two operands are identical (since we are
3205 restricted to floating-point inputs). */
3206 op0 = gimple_assign_rhs1 (def_stmt);
3207 op1 = gimple_assign_rhs2 (def_stmt);
3209 if (op0 == op1
3210 || operand_equal_p (op0, op1, 0))
3211 return true;
3213 default:
3214 return false;
3217 else if (is_gimple_call (def_stmt))
3219 tree fndecl = gimple_call_fndecl (def_stmt);
3220 if (fndecl
3221 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3223 tree arg1;
3225 switch (DECL_FUNCTION_CODE (fndecl))
3227 CASE_FLT_FN (BUILT_IN_ACOS):
3228 CASE_FLT_FN (BUILT_IN_ACOSH):
3229 CASE_FLT_FN (BUILT_IN_CABS):
3230 CASE_FLT_FN (BUILT_IN_COSH):
3231 CASE_FLT_FN (BUILT_IN_ERFC):
3232 CASE_FLT_FN (BUILT_IN_EXP):
3233 CASE_FLT_FN (BUILT_IN_EXP10):
3234 CASE_FLT_FN (BUILT_IN_EXP2):
3235 CASE_FLT_FN (BUILT_IN_FABS):
3236 CASE_FLT_FN (BUILT_IN_FDIM):
3237 CASE_FLT_FN (BUILT_IN_HYPOT):
3238 CASE_FLT_FN (BUILT_IN_POW10):
3239 return true;
3241 CASE_FLT_FN (BUILT_IN_SQRT):
3242 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3243 nonnegative inputs. */
3244 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3245 return true;
3247 break;
3249 CASE_FLT_FN (BUILT_IN_POWI):
3250 /* True if the second argument is an even integer. */
3251 arg1 = gimple_call_arg (def_stmt, 1);
3253 if (TREE_CODE (arg1) == INTEGER_CST
3254 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3255 return true;
3257 break;
3259 CASE_FLT_FN (BUILT_IN_POW):
3260 /* True if the second argument is an even integer-valued
3261 real. */
3262 arg1 = gimple_call_arg (def_stmt, 1);
3264 if (TREE_CODE (arg1) == REAL_CST)
3266 REAL_VALUE_TYPE c;
3267 HOST_WIDE_INT n;
3269 c = TREE_REAL_CST (arg1);
3270 n = real_to_integer (&c);
3272 if ((n & 1) == 0)
3274 REAL_VALUE_TYPE cint;
3275 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3276 if (real_identical (&c, &cint))
3277 return true;
3281 break;
3283 default:
3284 return false;
3289 return false;