* config/rl78/rl78.c (rl78_asm_file_start): Specify alternate
[official-gcc.git] / gcc / gimple-fold.c
blob51713e64655055bf0644576313b01e7a466118eb
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2013 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "dumpfile.h"
29 #include "tree-ssa.h"
30 #include "tree-ssa-propagate.h"
31 #include "target.h"
32 #include "gimple-fold.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node snode;

  /* Abstract decls (e.g. abstract origins of inlines) never have a body
     to reference.  */
  if (DECL_ABSTRACT (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      snode = symtab_get_node (decl);
      if (!snode)
	return false;
      node = dyn_cast <cgraph_node> (snode);
      /* A function fully inlined everywhere has no offline copy to refer to.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || !DECL_EXTERNAL (from_decl)
      || (flag_ltrans
	  && symtab_get_node (from_decl)->symbol.in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && (!(snode = symtab_get_node (decl)) || !snode->symbol.in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;

  /* OK we are seeing either COMDAT or static variable.  In this case we must
     check that the definition is still around so we can refer it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have function body and that we didn't take
	 the decision to eliminate offline copy of the function yet.
	 The second is important when devirtualization happens during final
	 compilation stage when making a new reference no longer makes callee
	 to be compiled.  */
      if (!node || !node->symbol.definition || node->global.inlined_to)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->symbol.definition)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  return true;
}
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE when the value refers to a
   declaration that cannot be referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite &p + CST into &MEM[p + CST] so the result is a plain
     ADDR_EXPR that is_gimple_min_invariant can accept.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal with its underlying decl.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up when the referenced decl is not available in this unit
	 (see can_refer_decl_in_current_unit_p for the possible reasons).  */
      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_get_create_real_symbol_node (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
203 /* If SYM is a constant variable with known value, return the value.
204 NULL_TREE is returned otherwise. */
206 tree
207 get_symbol_constant_value (tree sym)
209 tree val = ctor_for_folding (sym);
210 if (val != error_mark_node)
212 if (val)
214 val = canonicalize_constructor_val (unshare_expr (val), sym);
215 if (val && is_gimple_min_invariant (val))
216 return val;
217 else
218 return NULL_TREE;
220 /* Variables declared 'const' without an initializer
221 have zero as the initializer if they may not be
222 overridden at link or run time. */
223 if (!val
224 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
225 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
226 return build_zero_cst (TREE_TYPE (sym));
229 return NULL_TREE;
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  Returns the simplified
   reference or NULL_TREE if nothing changed.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  /* Fold VIEW_CONVERT/REALPART/IMAGPART/BIT_FIELD_REF of constants
     directly via the generic folders.  */
  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  /* Walk down to the innermost base of the reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  /* fold_binary does not preserve the volatile flag; restore it
	     before substituting, then retry folding the whole EXPR.  */
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  /* A load from a constant aggregate may fold to a constant.  */
  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p + 0] simplifies to p (modulo a type conversion).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
550 /* Attempt to fold a conditional statement. Return true if any changes were
551 made. We only attempt to fold the condition expression, and do not perform
552 any transformation that would require alteration of the cfg. It is
553 assumed that the operands have been previously folded. */
555 static bool
556 fold_gimple_cond (gimple stmt)
558 tree result = fold_binary_loc (gimple_location (stmt),
559 gimple_cond_code (stmt),
560 boolean_type_node,
561 gimple_cond_lhs (stmt),
562 gimple_cond_rhs (stmt));
564 if (result)
566 STRIP_USELESS_TYPE_CONVERSION (result);
567 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
569 gimple_cond_set_condition_from_tree (stmt, result);
570 return true;
574 return false;
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  struct gimplify_ctx gctx;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (&gctx);
  gctx.into_ssa = gimple_in_ssa_p (cfun);

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      /* The call has a LHS: gimplify EXPR into a temporary and assign
	 the temporary to the original LHS.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence (first seen walking backward)
	     reuses the original statement's VDEF; earlier stores get
	     fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* Maximum value: ARG itself must be a non-negative constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* For maximum queries, keep the larger of the two lengths.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* For exact length queries, all sources must agree.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Both arms of the conditional must have a known length.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
		 && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.
     ARG_IDX selects the argument whose length/value we analyze;
     TYPE is the get_maxval_strlen query kind (0 = exact length,
     1 = maximum length, 2 = maximum value).  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2], ignore,
					   DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors)
   EXPECTED_TYPE is type of the class virtual belongs to.  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  /* Require a declared base with a precisely known extent.  */
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (types_same_for_odr (type, expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      /* Locate the field of TYPE that covers OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_low_cst (DECL_SIZE (fld), 1);
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  /* Only binfos without ancestors are safe to use (see function comment).  */
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  else
    return binfo;
}
1088 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1089 The statement may be replaced by another statement, e.g., if the call
1090 simplifies to a constant value. Return true if any changes were made.
1091 It is assumed that the operands have been previously folded. */
1093 static bool
1094 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1096 gimple stmt = gsi_stmt (*gsi);
1097 tree callee;
1098 bool changed = false;
1099 unsigned i;
1101 /* Fold *& in call arguments. */
1102 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1103 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1105 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1106 if (tmp)
1108 gimple_call_set_arg (stmt, i, tmp);
1109 changed = true;
1113 /* Check for virtual calls that became direct calls. */
1114 callee = gimple_call_fn (stmt);
1115 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1117 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1119 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1120 changed = true;
1122 else if (virtual_method_call_p (callee))
1124 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1125 tree binfo = gimple_extract_devirt_binfo_from_cst
1126 (obj, obj_type_ref_class (callee));
1127 if (binfo)
1129 HOST_WIDE_INT token
1130 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1131 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1132 if (fndecl)
1134 gimple_call_set_fndecl (stmt, fndecl);
1135 changed = true;
1141 if (inplace)
1142 return changed;
1144 /* Check for builtins that CCP can handle using information not
1145 available in the generic fold routines. */
1146 callee = gimple_call_fndecl (stmt);
1147 if (callee && DECL_BUILT_IN (callee))
1149 tree result = gimple_fold_builtin (stmt);
1150 if (result)
1152 if (!update_call_from_tree (gsi, result))
1153 gimplify_and_update_call_from_tree (gsi, result);
1154 changed = true;
1156 else if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1157 changed |= targetm.gimple_fold_builtin (gsi);
1160 return changed;
1163 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1164 distinguishes both cases. */
1166 static bool
1167 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1169 bool changed = false;
1170 gimple stmt = gsi_stmt (*gsi);
1171 unsigned i;
1173 /* Fold the main computation performed by the statement. */
1174 switch (gimple_code (stmt))
1176 case GIMPLE_ASSIGN:
1178 unsigned old_num_ops = gimple_num_ops (stmt);
1179 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1180 tree lhs = gimple_assign_lhs (stmt);
1181 tree new_rhs;
1182 /* First canonicalize operand order. This avoids building new
1183 trees if this is the only thing fold would later do. */
1184 if ((commutative_tree_code (subcode)
1185 || commutative_ternary_tree_code (subcode))
1186 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1187 gimple_assign_rhs2 (stmt), false))
1189 tree tem = gimple_assign_rhs1 (stmt);
1190 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1191 gimple_assign_set_rhs2 (stmt, tem);
1192 changed = true;
1194 new_rhs = fold_gimple_assign (gsi);
1195 if (new_rhs
1196 && !useless_type_conversion_p (TREE_TYPE (lhs),
1197 TREE_TYPE (new_rhs)))
1198 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1199 if (new_rhs
1200 && (!inplace
1201 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1203 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1204 changed = true;
1206 break;
1209 case GIMPLE_COND:
1210 changed |= fold_gimple_cond (stmt);
1211 break;
1213 case GIMPLE_CALL:
1214 changed |= gimple_fold_call (gsi, inplace);
1215 break;
1217 case GIMPLE_ASM:
1218 /* Fold *& in asm operands. */
1220 size_t noutputs;
1221 const char **oconstraints;
1222 const char *constraint;
1223 bool allows_mem, allows_reg;
1225 noutputs = gimple_asm_noutputs (stmt);
1226 oconstraints = XALLOCAVEC (const char *, noutputs);
1228 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1230 tree link = gimple_asm_output_op (stmt, i);
1231 tree op = TREE_VALUE (link);
1232 oconstraints[i]
1233 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1234 if (REFERENCE_CLASS_P (op)
1235 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1237 TREE_VALUE (link) = op;
1238 changed = true;
1241 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1243 tree link = gimple_asm_input_op (stmt, i);
1244 tree op = TREE_VALUE (link);
1245 constraint
1246 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1247 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1248 oconstraints, &allows_mem, &allows_reg);
1249 if (REFERENCE_CLASS_P (op)
1250 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1251 != NULL_TREE)
1253 TREE_VALUE (link) = op;
1254 changed = true;
1258 break;
1260 case GIMPLE_DEBUG:
1261 if (gimple_debug_bind_p (stmt))
1263 tree val = gimple_debug_bind_get_value (stmt);
1264 if (val
1265 && REFERENCE_CLASS_P (val))
1267 tree tem = maybe_fold_reference (val, false);
1268 if (tem)
1270 gimple_debug_bind_set_value (stmt, tem);
1271 changed = true;
1274 else if (val
1275 && TREE_CODE (val) == ADDR_EXPR)
1277 tree ref = TREE_OPERAND (val, 0);
1278 tree tem = maybe_fold_reference (ref, false);
1279 if (tem)
1281 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1282 gimple_debug_bind_set_value (stmt, tem);
1283 changed = true;
1287 break;
1289 default:;
1292 stmt = gsi_stmt (*gsi);
1294 /* Fold *& on the lhs. */
1295 if (gimple_has_lhs (stmt))
1297 tree lhs = gimple_get_lhs (stmt);
1298 if (lhs && REFERENCE_CLASS_P (lhs))
1300 tree new_lhs = maybe_fold_reference (lhs, true);
1301 if (new_lhs)
1303 gimple_set_lhs (stmt, new_lhs);
1304 changed = true;
1309 return changed;
1312 /* Fold the statement pointed to by GSI. In some cases, this function may
1313 replace the whole statement with a new one. Returns true iff folding
1314 makes any changes.
1315 The statement pointed to by GSI should be in valid gimple form but may
1316 be in unfolded state as resulting from for example constant propagation
1317 which can produce *&x = 0. */
1319 bool
1320 fold_stmt (gimple_stmt_iterator *gsi)
1322 return fold_stmt_1 (gsi, false);
1325 /* Perform the minimal folding on statement *GSI. Only operations like
1326 *&x created by constant propagation are handled. The statement cannot
1327 be replaced with a new one. Return true if the statement was
1328 changed, false otherwise.
1329 The statement *GSI should be in valid gimple form but may
1330 be in unfolded state as resulting from for example constant propagation
1331 which can produce *&x = 0. */
1333 bool
1334 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1336 gimple stmt = gsi_stmt (*gsi);
1337 bool changed = fold_stmt_1 (gsi, true);
1338 gcc_assert (gsi_stmt (*gsi) == stmt);
1339 return changed;
1342 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1343 if EXPR is null or we don't know how.
1344 If non-null, the result always has boolean type. */
1346 static tree
1347 canonicalize_bool (tree expr, bool invert)
1349 if (!expr)
1350 return NULL_TREE;
1351 else if (invert)
1353 if (integer_nonzerop (expr))
1354 return boolean_false_node;
1355 else if (integer_zerop (expr))
1356 return boolean_true_node;
1357 else if (TREE_CODE (expr) == SSA_NAME)
1358 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1359 build_int_cst (TREE_TYPE (expr), 0));
1360 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1361 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1362 boolean_type_node,
1363 TREE_OPERAND (expr, 0),
1364 TREE_OPERAND (expr, 1));
1365 else
1366 return NULL_TREE;
1368 else
1370 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1371 return expr;
1372 if (integer_nonzerop (expr))
1373 return boolean_true_node;
1374 else if (integer_zerop (expr))
1375 return boolean_false_node;
1376 else if (TREE_CODE (expr) == SSA_NAME)
1377 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1378 build_int_cst (TREE_TYPE (expr), 0));
1379 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1380 return fold_build2 (TREE_CODE (expr),
1381 boolean_type_node,
1382 TREE_OPERAND (expr, 0),
1383 TREE_OPERAND (expr, 1));
1384 else
1385 return NULL_TREE;
1389 /* Check to see if a boolean expression EXPR is logically equivalent to the
1390 comparison (OP1 CODE OP2). Check for various identities involving
1391 SSA_NAMEs. */
1393 static bool
1394 same_bool_comparison_p (const_tree expr, enum tree_code code,
1395 const_tree op1, const_tree op2)
1397 gimple s;
1399 /* The obvious case. */
1400 if (TREE_CODE (expr) == code
1401 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1402 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1403 return true;
1405 /* Check for comparing (name, name != 0) and the case where expr
1406 is an SSA_NAME with a definition matching the comparison. */
1407 if (TREE_CODE (expr) == SSA_NAME
1408 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1410 if (operand_equal_p (expr, op1, 0))
1411 return ((code == NE_EXPR && integer_zerop (op2))
1412 || (code == EQ_EXPR && integer_nonzerop (op2)));
1413 s = SSA_NAME_DEF_STMT (expr);
1414 if (is_gimple_assign (s)
1415 && gimple_assign_rhs_code (s) == code
1416 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1417 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1418 return true;
1421 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1422 of name is a comparison, recurse. */
1423 if (TREE_CODE (op1) == SSA_NAME
1424 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1426 s = SSA_NAME_DEF_STMT (op1);
1427 if (is_gimple_assign (s)
1428 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1430 enum tree_code c = gimple_assign_rhs_code (s);
1431 if ((c == NE_EXPR && integer_zerop (op2))
1432 || (c == EQ_EXPR && integer_nonzerop (op2)))
1433 return same_bool_comparison_p (expr, c,
1434 gimple_assign_rhs1 (s),
1435 gimple_assign_rhs2 (s));
1436 if ((c == EQ_EXPR && integer_zerop (op2))
1437 || (c == NE_EXPR && integer_nonzerop (op2)))
1438 return same_bool_comparison_p (expr,
1439 invert_tree_comparison (c, false),
1440 gimple_assign_rhs1 (s),
1441 gimple_assign_rhs2 (s));
1444 return false;
1447 /* Check to see if two boolean expressions OP1 and OP2 are logically
1448 equivalent. */
1450 static bool
1451 same_bool_result_p (const_tree op1, const_tree op2)
1453 /* Simple cases first. */
1454 if (operand_equal_p (op1, op2, 0))
1455 return true;
1457 /* Check the cases where at least one of the operands is a comparison.
1458 These are a bit smarter than operand_equal_p in that they apply some
1459 identifies on SSA_NAMEs. */
1460 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1461 && same_bool_comparison_p (op1, TREE_CODE (op2),
1462 TREE_OPERAND (op2, 0),
1463 TREE_OPERAND (op2, 1)))
1464 return true;
1465 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1466 && same_bool_comparison_p (op2, TREE_CODE (op1),
1467 TREE_OPERAND (op1, 0),
1468 TREE_OPERAND (op1, 1)))
1469 return true;
1471 /* Default case. */
1472 return false;
1475 /* Forward declarations for some mutually recursive functions. */
1477 static tree
1478 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1479 enum tree_code code2, tree op2a, tree op2b);
1480 static tree
1481 and_var_with_comparison (tree var, bool invert,
1482 enum tree_code code2, tree op2a, tree op2b);
1483 static tree
1484 and_var_with_comparison_1 (gimple stmt,
1485 enum tree_code code2, tree op2a, tree op2b);
1486 static tree
1487 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1488 enum tree_code code2, tree op2a, tree op2b);
1489 static tree
1490 or_var_with_comparison (tree var, bool invert,
1491 enum tree_code code2, tree op2a, tree op2b);
1492 static tree
1493 or_var_with_comparison_1 (gimple stmt,
1494 enum tree_code code2, tree op2a, tree op2b);
1496 /* Helper function for and_comparisons_1: try to simplify the AND of the
1497 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1498 If INVERT is true, invert the value of the VAR before doing the AND.
1499 Return NULL_EXPR if we can't simplify this to a single expression. */
1501 static tree
1502 and_var_with_comparison (tree var, bool invert,
1503 enum tree_code code2, tree op2a, tree op2b)
1505 tree t;
1506 gimple stmt = SSA_NAME_DEF_STMT (var);
1508 /* We can only deal with variables whose definitions are assignments. */
1509 if (!is_gimple_assign (stmt))
1510 return NULL_TREE;
1512 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1513 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1514 Then we only have to consider the simpler non-inverted cases. */
1515 if (invert)
1516 t = or_var_with_comparison_1 (stmt,
1517 invert_tree_comparison (code2, false),
1518 op2a, op2b);
1519 else
1520 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1521 return canonicalize_bool (t, invert);
1524 /* Try to simplify the AND of the ssa variable defined by the assignment
1525 STMT with the comparison specified by (OP2A CODE2 OP2B).
1526 Return NULL_EXPR if we can't simplify this to a single expression. */
1528 static tree
1529 and_var_with_comparison_1 (gimple stmt,
1530 enum tree_code code2, tree op2a, tree op2b)
1532 tree var = gimple_assign_lhs (stmt);
1533 tree true_test_var = NULL_TREE;
1534 tree false_test_var = NULL_TREE;
1535 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1537 /* Check for identities like (var AND (var == 0)) => false. */
1538 if (TREE_CODE (op2a) == SSA_NAME
1539 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1541 if ((code2 == NE_EXPR && integer_zerop (op2b))
1542 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1544 true_test_var = op2a;
1545 if (var == true_test_var)
1546 return var;
1548 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1549 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1551 false_test_var = op2a;
1552 if (var == false_test_var)
1553 return boolean_false_node;
1557 /* If the definition is a comparison, recurse on it. */
1558 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1560 tree t = and_comparisons_1 (innercode,
1561 gimple_assign_rhs1 (stmt),
1562 gimple_assign_rhs2 (stmt),
1563 code2,
1564 op2a,
1565 op2b);
1566 if (t)
1567 return t;
1570 /* If the definition is an AND or OR expression, we may be able to
1571 simplify by reassociating. */
1572 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1573 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1575 tree inner1 = gimple_assign_rhs1 (stmt);
1576 tree inner2 = gimple_assign_rhs2 (stmt);
1577 gimple s;
1578 tree t;
1579 tree partial = NULL_TREE;
1580 bool is_and = (innercode == BIT_AND_EXPR);
1582 /* Check for boolean identities that don't require recursive examination
1583 of inner1/inner2:
1584 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1585 inner1 AND (inner1 OR inner2) => inner1
1586 !inner1 AND (inner1 AND inner2) => false
1587 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1588 Likewise for similar cases involving inner2. */
1589 if (inner1 == true_test_var)
1590 return (is_and ? var : inner1);
1591 else if (inner2 == true_test_var)
1592 return (is_and ? var : inner2);
1593 else if (inner1 == false_test_var)
1594 return (is_and
1595 ? boolean_false_node
1596 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1597 else if (inner2 == false_test_var)
1598 return (is_and
1599 ? boolean_false_node
1600 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1602 /* Next, redistribute/reassociate the AND across the inner tests.
1603 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1604 if (TREE_CODE (inner1) == SSA_NAME
1605 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1606 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1607 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1608 gimple_assign_rhs1 (s),
1609 gimple_assign_rhs2 (s),
1610 code2, op2a, op2b)))
1612 /* Handle the AND case, where we are reassociating:
1613 (inner1 AND inner2) AND (op2a code2 op2b)
1614 => (t AND inner2)
1615 If the partial result t is a constant, we win. Otherwise
1616 continue on to try reassociating with the other inner test. */
1617 if (is_and)
1619 if (integer_onep (t))
1620 return inner2;
1621 else if (integer_zerop (t))
1622 return boolean_false_node;
1625 /* Handle the OR case, where we are redistributing:
1626 (inner1 OR inner2) AND (op2a code2 op2b)
1627 => (t OR (inner2 AND (op2a code2 op2b))) */
1628 else if (integer_onep (t))
1629 return boolean_true_node;
1631 /* Save partial result for later. */
1632 partial = t;
1635 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1636 if (TREE_CODE (inner2) == SSA_NAME
1637 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1638 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1639 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1640 gimple_assign_rhs1 (s),
1641 gimple_assign_rhs2 (s),
1642 code2, op2a, op2b)))
1644 /* Handle the AND case, where we are reassociating:
1645 (inner1 AND inner2) AND (op2a code2 op2b)
1646 => (inner1 AND t) */
1647 if (is_and)
1649 if (integer_onep (t))
1650 return inner1;
1651 else if (integer_zerop (t))
1652 return boolean_false_node;
1653 /* If both are the same, we can apply the identity
1654 (x AND x) == x. */
1655 else if (partial && same_bool_result_p (t, partial))
1656 return t;
1659 /* Handle the OR case. where we are redistributing:
1660 (inner1 OR inner2) AND (op2a code2 op2b)
1661 => (t OR (inner1 AND (op2a code2 op2b)))
1662 => (t OR partial) */
1663 else
1665 if (integer_onep (t))
1666 return boolean_true_node;
1667 else if (partial)
1669 /* We already got a simplification for the other
1670 operand to the redistributed OR expression. The
1671 interesting case is when at least one is false.
1672 Or, if both are the same, we can apply the identity
1673 (x OR x) == x. */
1674 if (integer_zerop (partial))
1675 return t;
1676 else if (integer_zerop (t))
1677 return partial;
1678 else if (same_bool_result_p (t, partial))
1679 return t;
1684 return NULL_TREE;
1687 /* Try to simplify the AND of two comparisons defined by
1688 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1689 If this can be done without constructing an intermediate value,
1690 return the resulting tree; otherwise NULL_TREE is returned.
1691 This function is deliberately asymmetric as it recurses on SSA_DEFs
1692 in the first comparison but not the second. */
1694 static tree
1695 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1696 enum tree_code code2, tree op2a, tree op2b)
1698 tree truth_type = truth_type_for (TREE_TYPE (op1a));
1700 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1701 if (operand_equal_p (op1a, op2a, 0)
1702 && operand_equal_p (op1b, op2b, 0))
1704 /* Result will be either NULL_TREE, or a combined comparison. */
1705 tree t = combine_comparisons (UNKNOWN_LOCATION,
1706 TRUTH_ANDIF_EXPR, code1, code2,
1707 truth_type, op1a, op1b);
1708 if (t)
1709 return t;
1712 /* Likewise the swapped case of the above. */
1713 if (operand_equal_p (op1a, op2b, 0)
1714 && operand_equal_p (op1b, op2a, 0))
1716 /* Result will be either NULL_TREE, or a combined comparison. */
1717 tree t = combine_comparisons (UNKNOWN_LOCATION,
1718 TRUTH_ANDIF_EXPR, code1,
1719 swap_tree_comparison (code2),
1720 truth_type, op1a, op1b);
1721 if (t)
1722 return t;
1725 /* If both comparisons are of the same value against constants, we might
1726 be able to merge them. */
1727 if (operand_equal_p (op1a, op2a, 0)
1728 && TREE_CODE (op1b) == INTEGER_CST
1729 && TREE_CODE (op2b) == INTEGER_CST)
1731 int cmp = tree_int_cst_compare (op1b, op2b);
1733 /* If we have (op1a == op1b), we should either be able to
1734 return that or FALSE, depending on whether the constant op1b
1735 also satisfies the other comparison against op2b. */
1736 if (code1 == EQ_EXPR)
1738 bool done = true;
1739 bool val;
1740 switch (code2)
1742 case EQ_EXPR: val = (cmp == 0); break;
1743 case NE_EXPR: val = (cmp != 0); break;
1744 case LT_EXPR: val = (cmp < 0); break;
1745 case GT_EXPR: val = (cmp > 0); break;
1746 case LE_EXPR: val = (cmp <= 0); break;
1747 case GE_EXPR: val = (cmp >= 0); break;
1748 default: done = false;
1750 if (done)
1752 if (val)
1753 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1754 else
1755 return boolean_false_node;
1758 /* Likewise if the second comparison is an == comparison. */
1759 else if (code2 == EQ_EXPR)
1761 bool done = true;
1762 bool val;
1763 switch (code1)
1765 case EQ_EXPR: val = (cmp == 0); break;
1766 case NE_EXPR: val = (cmp != 0); break;
1767 case LT_EXPR: val = (cmp > 0); break;
1768 case GT_EXPR: val = (cmp < 0); break;
1769 case LE_EXPR: val = (cmp >= 0); break;
1770 case GE_EXPR: val = (cmp <= 0); break;
1771 default: done = false;
1773 if (done)
1775 if (val)
1776 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1777 else
1778 return boolean_false_node;
1782 /* Same business with inequality tests. */
1783 else if (code1 == NE_EXPR)
1785 bool val;
1786 switch (code2)
1788 case EQ_EXPR: val = (cmp != 0); break;
1789 case NE_EXPR: val = (cmp == 0); break;
1790 case LT_EXPR: val = (cmp >= 0); break;
1791 case GT_EXPR: val = (cmp <= 0); break;
1792 case LE_EXPR: val = (cmp > 0); break;
1793 case GE_EXPR: val = (cmp < 0); break;
1794 default:
1795 val = false;
1797 if (val)
1798 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1800 else if (code2 == NE_EXPR)
1802 bool val;
1803 switch (code1)
1805 case EQ_EXPR: val = (cmp == 0); break;
1806 case NE_EXPR: val = (cmp != 0); break;
1807 case LT_EXPR: val = (cmp <= 0); break;
1808 case GT_EXPR: val = (cmp >= 0); break;
1809 case LE_EXPR: val = (cmp < 0); break;
1810 case GE_EXPR: val = (cmp > 0); break;
1811 default:
1812 val = false;
1814 if (val)
1815 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1818 /* Chose the more restrictive of two < or <= comparisons. */
1819 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1820 && (code2 == LT_EXPR || code2 == LE_EXPR))
1822 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1823 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1824 else
1825 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1828 /* Likewise chose the more restrictive of two > or >= comparisons. */
1829 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1830 && (code2 == GT_EXPR || code2 == GE_EXPR))
1832 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1833 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1834 else
1835 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1838 /* Check for singleton ranges. */
1839 else if (cmp == 0
1840 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1841 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1842 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1844 /* Check for disjoint ranges. */
1845 else if (cmp <= 0
1846 && (code1 == LT_EXPR || code1 == LE_EXPR)
1847 && (code2 == GT_EXPR || code2 == GE_EXPR))
1848 return boolean_false_node;
1849 else if (cmp >= 0
1850 && (code1 == GT_EXPR || code1 == GE_EXPR)
1851 && (code2 == LT_EXPR || code2 == LE_EXPR))
1852 return boolean_false_node;
1855 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1856 NAME's definition is a truth value. See if there are any simplifications
1857 that can be done against the NAME's definition. */
1858 if (TREE_CODE (op1a) == SSA_NAME
1859 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1860 && (integer_zerop (op1b) || integer_onep (op1b)))
1862 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1863 || (code1 == NE_EXPR && integer_onep (op1b)));
1864 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1865 switch (gimple_code (stmt))
1867 case GIMPLE_ASSIGN:
1868 /* Try to simplify by copy-propagating the definition. */
1869 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1871 case GIMPLE_PHI:
1872 /* If every argument to the PHI produces the same result when
1873 ANDed with the second comparison, we win.
1874 Do not do this unless the type is bool since we need a bool
1875 result here anyway. */
1876 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1878 tree result = NULL_TREE;
1879 unsigned i;
1880 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1882 tree arg = gimple_phi_arg_def (stmt, i);
1884 /* If this PHI has itself as an argument, ignore it.
1885 If all the other args produce the same result,
1886 we're still OK. */
1887 if (arg == gimple_phi_result (stmt))
1888 continue;
1889 else if (TREE_CODE (arg) == INTEGER_CST)
1891 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1893 if (!result)
1894 result = boolean_false_node;
1895 else if (!integer_zerop (result))
1896 return NULL_TREE;
1898 else if (!result)
1899 result = fold_build2 (code2, boolean_type_node,
1900 op2a, op2b);
1901 else if (!same_bool_comparison_p (result,
1902 code2, op2a, op2b))
1903 return NULL_TREE;
1905 else if (TREE_CODE (arg) == SSA_NAME
1906 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1908 tree temp;
1909 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1910 /* In simple cases we can look through PHI nodes,
1911 but we have to be careful with loops.
1912 See PR49073. */
1913 if (! dom_info_available_p (CDI_DOMINATORS)
1914 || gimple_bb (def_stmt) == gimple_bb (stmt)
1915 || dominated_by_p (CDI_DOMINATORS,
1916 gimple_bb (def_stmt),
1917 gimple_bb (stmt)))
1918 return NULL_TREE;
1919 temp = and_var_with_comparison (arg, invert, code2,
1920 op2a, op2b);
1921 if (!temp)
1922 return NULL_TREE;
1923 else if (!result)
1924 result = temp;
1925 else if (!same_bool_result_p (result, temp))
1926 return NULL_TREE;
1928 else
1929 return NULL_TREE;
1931 return result;
1934 default:
1935 break;
1938 return NULL_TREE;
1941 /* Try to simplify the AND of two comparisons, specified by
1942 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1943 If this can be simplified to a single expression (without requiring
1944 introducing more SSA variables to hold intermediate values),
1945 return the resulting tree. Otherwise return NULL_TREE.
1946 If the result expression is non-null, it has boolean type. */
1948 tree
1949 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1950 enum tree_code code2, tree op2a, tree op2b)
1952 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1953 if (t)
1954 return t;
1955 else
1956 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1959 /* Helper function for or_comparisons_1: try to simplify the OR of the
1960 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1961 If INVERT is true, invert the value of VAR before doing the OR.
1962 Return NULL_EXPR if we can't simplify this to a single expression. */
1964 static tree
1965 or_var_with_comparison (tree var, bool invert,
1966 enum tree_code code2, tree op2a, tree op2b)
1968 tree t;
1969 gimple stmt = SSA_NAME_DEF_STMT (var);
1971 /* We can only deal with variables whose definitions are assignments. */
1972 if (!is_gimple_assign (stmt))
1973 return NULL_TREE;
1975 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1976 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1977 Then we only have to consider the simpler non-inverted cases. */
1978 if (invert)
1979 t = and_var_with_comparison_1 (stmt,
1980 invert_tree_comparison (code2, false),
1981 op2a, op2b);
1982 else
1983 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1984 return canonicalize_bool (t, invert);
1987 /* Try to simplify the OR of the ssa variable defined by the assignment
1988 STMT with the comparison specified by (OP2A CODE2 OP2B).
1989 Return NULL_EXPR if we can't simplify this to a single expression. */
1991 static tree
1992 or_var_with_comparison_1 (gimple stmt,
1993 enum tree_code code2, tree op2a, tree op2b)
1995 tree var = gimple_assign_lhs (stmt);
1996 tree true_test_var = NULL_TREE;
1997 tree false_test_var = NULL_TREE;
1998 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2000 /* Check for identities like (var OR (var != 0)) => true . */
2001 if (TREE_CODE (op2a) == SSA_NAME
2002 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2004 if ((code2 == NE_EXPR && integer_zerop (op2b))
2005 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2007 true_test_var = op2a;
2008 if (var == true_test_var)
2009 return var;
2011 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2012 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2014 false_test_var = op2a;
2015 if (var == false_test_var)
2016 return boolean_true_node;
2020 /* If the definition is a comparison, recurse on it. */
2021 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2023 tree t = or_comparisons_1 (innercode,
2024 gimple_assign_rhs1 (stmt),
2025 gimple_assign_rhs2 (stmt),
2026 code2,
2027 op2a,
2028 op2b);
2029 if (t)
2030 return t;
2033 /* If the definition is an AND or OR expression, we may be able to
2034 simplify by reassociating. */
2035 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2036 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2038 tree inner1 = gimple_assign_rhs1 (stmt);
2039 tree inner2 = gimple_assign_rhs2 (stmt);
2040 gimple s;
2041 tree t;
2042 tree partial = NULL_TREE;
2043 bool is_or = (innercode == BIT_IOR_EXPR);
2045 /* Check for boolean identities that don't require recursive examination
2046 of inner1/inner2:
2047 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2048 inner1 OR (inner1 AND inner2) => inner1
2049 !inner1 OR (inner1 OR inner2) => true
2050 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2052 if (inner1 == true_test_var)
2053 return (is_or ? var : inner1);
2054 else if (inner2 == true_test_var)
2055 return (is_or ? var : inner2);
2056 else if (inner1 == false_test_var)
2057 return (is_or
2058 ? boolean_true_node
2059 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2060 else if (inner2 == false_test_var)
2061 return (is_or
2062 ? boolean_true_node
2063 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2065 /* Next, redistribute/reassociate the OR across the inner tests.
2066 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2067 if (TREE_CODE (inner1) == SSA_NAME
2068 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2069 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2070 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2071 gimple_assign_rhs1 (s),
2072 gimple_assign_rhs2 (s),
2073 code2, op2a, op2b)))
2075 /* Handle the OR case, where we are reassociating:
2076 (inner1 OR inner2) OR (op2a code2 op2b)
2077 => (t OR inner2)
2078 If the partial result t is a constant, we win. Otherwise
2079 continue on to try reassociating with the other inner test. */
2080 if (is_or)
2082 if (integer_onep (t))
2083 return boolean_true_node;
2084 else if (integer_zerop (t))
2085 return inner2;
2088 /* Handle the AND case, where we are redistributing:
2089 (inner1 AND inner2) OR (op2a code2 op2b)
2090 => (t AND (inner2 OR (op2a code op2b))) */
2091 else if (integer_zerop (t))
2092 return boolean_false_node;
2094 /* Save partial result for later. */
2095 partial = t;
2098 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2099 if (TREE_CODE (inner2) == SSA_NAME
2100 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2101 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2102 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2103 gimple_assign_rhs1 (s),
2104 gimple_assign_rhs2 (s),
2105 code2, op2a, op2b)))
2107 /* Handle the OR case, where we are reassociating:
2108 (inner1 OR inner2) OR (op2a code2 op2b)
2109 => (inner1 OR t)
2110 => (t OR partial) */
2111 if (is_or)
2113 if (integer_zerop (t))
2114 return inner1;
2115 else if (integer_onep (t))
2116 return boolean_true_node;
2117 /* If both are the same, we can apply the identity
2118 (x OR x) == x. */
2119 else if (partial && same_bool_result_p (t, partial))
2120 return t;
2123 /* Handle the AND case, where we are redistributing:
2124 (inner1 AND inner2) OR (op2a code2 op2b)
2125 => (t AND (inner1 OR (op2a code2 op2b)))
2126 => (t AND partial) */
2127 else
2129 if (integer_zerop (t))
2130 return boolean_false_node;
2131 else if (partial)
2133 /* We already got a simplification for the other
2134 operand to the redistributed AND expression. The
2135 interesting case is when at least one is true.
2136 Or, if both are the same, we can apply the identity
2137 (x AND x) == x. */
2138 if (integer_onep (partial))
2139 return t;
2140 else if (integer_onep (t))
2141 return partial;
2142 else if (same_bool_result_p (t, partial))
2143 return t;
2148 return NULL_TREE;
2151 /* Try to simplify the OR of two comparisons defined by
2152 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2153 If this can be done without constructing an intermediate value,
2154 return the resulting tree; otherwise NULL_TREE is returned.
2155 This function is deliberately asymmetric as it recurses on SSA_DEFs
2156 in the first comparison but not the second. */
2158 static tree
2159 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2160 enum tree_code code2, tree op2a, tree op2b)
2162 tree truth_type = truth_type_for (TREE_TYPE (op1a));
2164 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2165 if (operand_equal_p (op1a, op2a, 0)
2166 && operand_equal_p (op1b, op2b, 0))
2168 /* Result will be either NULL_TREE, or a combined comparison. */
2169 tree t = combine_comparisons (UNKNOWN_LOCATION,
2170 TRUTH_ORIF_EXPR, code1, code2,
2171 truth_type, op1a, op1b);
2172 if (t)
2173 return t;
2176 /* Likewise the swapped case of the above. */
2177 if (operand_equal_p (op1a, op2b, 0)
2178 && operand_equal_p (op1b, op2a, 0))
2180 /* Result will be either NULL_TREE, or a combined comparison. */
2181 tree t = combine_comparisons (UNKNOWN_LOCATION,
2182 TRUTH_ORIF_EXPR, code1,
2183 swap_tree_comparison (code2),
2184 truth_type, op1a, op1b);
2185 if (t)
2186 return t;
2189 /* If both comparisons are of the same value against constants, we might
2190 be able to merge them. */
2191 if (operand_equal_p (op1a, op2a, 0)
2192 && TREE_CODE (op1b) == INTEGER_CST
2193 && TREE_CODE (op2b) == INTEGER_CST)
/* CMP is negative, zero, or positive as OP1B is less than, equal to,
   or greater than OP2B.  All the case analysis below keys off it.  */
2195 int cmp = tree_int_cst_compare (op1b, op2b);
2197 /* If we have (op1a != op1b), we should either be able to
2198 return that or TRUE, depending on whether the constant op1b
2199 also satisfies the other comparison against op2b. */
2200 if (code1 == NE_EXPR)
2202 bool done = true;
2203 bool val;
2204 switch (code2)
2206 case EQ_EXPR: val = (cmp == 0); break;
2207 case NE_EXPR: val = (cmp != 0); break;
2208 case LT_EXPR: val = (cmp < 0); break;
2209 case GT_EXPR: val = (cmp > 0); break;
2210 case LE_EXPR: val = (cmp <= 0); break;
2211 case GE_EXPR: val = (cmp >= 0); break;
2212 default: done = false;
2214 if (done)
2216 if (val)
2217 return boolean_true_node;
2218 else
2219 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2222 /* Likewise if the second comparison is a != comparison. */
2223 else if (code2 == NE_EXPR)
2225 bool done = true;
2226 bool val;
2227 switch (code1)
2229 case EQ_EXPR: val = (cmp == 0); break;
2230 case NE_EXPR: val = (cmp != 0); break;
2231 case LT_EXPR: val = (cmp > 0); break;
2232 case GT_EXPR: val = (cmp < 0); break;
2233 case LE_EXPR: val = (cmp >= 0); break;
2234 case GE_EXPR: val = (cmp <= 0); break;
2235 default: done = false;
2237 if (done)
2239 if (val)
2240 return boolean_true_node;
2241 else
2242 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2246 /* See if an equality test is redundant with the other comparison. */
2247 else if (code1 == EQ_EXPR)
2249 bool val;
2250 switch (code2)
2252 case EQ_EXPR: val = (cmp == 0); break;
2253 case NE_EXPR: val = (cmp != 0); break;
2254 case LT_EXPR: val = (cmp < 0); break;
2255 case GT_EXPR: val = (cmp > 0); break;
2256 case LE_EXPR: val = (cmp <= 0); break;
2257 case GE_EXPR: val = (cmp >= 0); break;
2258 default:
2259 val = false;
/* VAL true means (op1a == op1b) implies (op2a code2 op2b), so the
   whole OR collapses to the second comparison.  */
2261 if (val)
2262 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2264 else if (code2 == EQ_EXPR)
2266 bool val;
2267 switch (code1)
2269 case EQ_EXPR: val = (cmp == 0); break;
2270 case NE_EXPR: val = (cmp != 0); break;
2271 case LT_EXPR: val = (cmp > 0); break;
2272 case GT_EXPR: val = (cmp < 0); break;
2273 case LE_EXPR: val = (cmp >= 0); break;
2274 case GE_EXPR: val = (cmp <= 0); break;
2275 default:
2276 val = false;
2278 if (val)
2279 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2282 /* Choose the less restrictive of two < or <= comparisons. */
2283 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2284 && (code2 == LT_EXPR || code2 == LE_EXPR))
2286 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2287 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2288 else
2289 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2292 /* Likewise choose the less restrictive of two > or >= comparisons. */
2293 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2294 && (code2 == GT_EXPR || code2 == GE_EXPR))
2296 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2297 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2298 else
2299 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2302 /* Check for singleton ranges. */
2303 else if (cmp == 0
2304 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2305 || (code1 == GT_EXPR && code2 == LT_EXPR)))
/* (x < C) | (x > C) with equal constants is simply (x != C).  */
2306 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2308 /* Check for less/greater pairs that don't restrict the range at all. */
2309 else if (cmp >= 0
2310 && (code1 == LT_EXPR || code1 == LE_EXPR)
2311 && (code2 == GT_EXPR || code2 == GE_EXPR))
2312 return boolean_true_node;
2313 else if (cmp <= 0
2314 && (code1 == GT_EXPR || code1 == GE_EXPR)
2315 && (code2 == LT_EXPR || code2 == LE_EXPR))
2316 return boolean_true_node;
2319 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2320 NAME's definition is a truth value. See if there are any simplifications
2321 that can be done against the NAME's definition. */
2322 if (TREE_CODE (op1a) == SSA_NAME
2323 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2324 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is true when the first test is the logical negation of NAME,
   i.e. (NAME == 0) or (NAME != 1).  */
2326 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2327 || (code1 == NE_EXPR && integer_onep (op1b)));
2328 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2329 switch (gimple_code (stmt))
2331 case GIMPLE_ASSIGN:
2332 /* Try to simplify by copy-propagating the definition. */
2333 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2335 case GIMPLE_PHI:
2336 /* If every argument to the PHI produces the same result when
2337 ORed with the second comparison, we win.
2338 Do not do this unless the type is bool since we need a bool
2339 result here anyway. */
2340 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2342 tree result = NULL_TREE;
2343 unsigned i;
2344 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2346 tree arg = gimple_phi_arg_def (stmt, i);
2348 /* If this PHI has itself as an argument, ignore it.
2349 If all the other args produce the same result,
2350 we're still OK. */
2351 if (arg == gimple_phi_result (stmt))
2352 continue;
2353 else if (TREE_CODE (arg) == INTEGER_CST)
2355 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2357 if (!result)
2358 result = boolean_true_node;
2359 else if (!integer_onep (result))
2360 return NULL_TREE;
2362 else if (!result)
2363 result = fold_build2 (code2, boolean_type_node,
2364 op2a, op2b);
2365 else if (!same_bool_comparison_p (result,
2366 code2, op2a, op2b))
2367 return NULL_TREE;
2369 else if (TREE_CODE (arg) == SSA_NAME
2370 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2372 tree temp;
2373 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2374 /* In simple cases we can look through PHI nodes,
2375 but we have to be careful with loops.
2376 See PR49073. */
2377 if (! dom_info_available_p (CDI_DOMINATORS)
2378 || gimple_bb (def_stmt) == gimple_bb (stmt)
2379 || dominated_by_p (CDI_DOMINATORS,
2380 gimple_bb (def_stmt),
2381 gimple_bb (stmt)))
2382 return NULL_TREE;
2383 temp = or_var_with_comparison (arg, invert, code2,
2384 op2a, op2b);
2385 if (!temp)
2386 return NULL_TREE;
2387 else if (!result)
2388 result = temp;
2389 else if (!same_bool_result_p (result, temp))
2390 return NULL_TREE;
2392 else
2393 return NULL_TREE;
2395 return result;
2398 default:
2399 break;
2402 return NULL_TREE;
2405 /* Try to simplify the OR of two comparisons, specified by
2406 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2407 If this can be simplified to a single expression (without requiring
2408 introducing more SSA variables to hold intermediate values),
2409 return the resulting tree. Otherwise return NULL_TREE.
2410 If the result expression is non-null, it has boolean type. */
2412 tree
2413 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2414 enum tree_code code2, tree op2a, tree op2b)
2416 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2417 if (t)
2418 return t;
2419 else
2420 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2424 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2426 Either NULL_TREE, a simplified but non-constant or a constant
2427 is returned.
2429 ??? This should go into a gimple-fold-inline.h file to be eventually
2430 privatized with the single valueize function used in the various TUs
2431 to avoid the indirect function call overhead. */
2433 tree
2434 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2436 location_t loc = gimple_location (stmt);
2437 switch (gimple_code (stmt))
2439 case GIMPLE_ASSIGN:
2441 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2443 switch (get_gimple_rhs_class (subcode))
/* Single-operand RHS: propagate SSA values, invariant addresses,
   constant vector constructors, and constant loads.  */
2445 case GIMPLE_SINGLE_RHS:
2447 tree rhs = gimple_assign_rhs1 (stmt);
2448 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2450 if (TREE_CODE (rhs) == SSA_NAME)
2452 /* If the RHS is an SSA_NAME, return its known constant value,
2453 if any. */
2454 return (*valueize) (rhs);
2456 /* Handle propagating invariant addresses into address
2457 operations. */
2458 else if (TREE_CODE (rhs) == ADDR_EXPR
2459 && !is_gimple_min_invariant (rhs))
2461 HOST_WIDE_INT offset = 0;
2462 tree base;
2463 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2464 &offset,
2465 valueize);
2466 if (base
2467 && (CONSTANT_CLASS_P (base)
2468 || decl_address_invariant_p (base)))
2469 return build_invariant_address (TREE_TYPE (rhs),
2470 base, offset);
2472 else if (TREE_CODE (rhs) == CONSTRUCTOR
2473 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2474 && (CONSTRUCTOR_NELTS (rhs)
2475 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2477 unsigned i;
2478 tree val, *vec;
2480 vec = XALLOCAVEC (tree,
2481 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)))
2482 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2484 val = (*valueize) (val);
/* Give up unless every valueized element is a scalar constant.  */
2485 if (TREE_CODE (val) == INTEGER_CST
2486 || TREE_CODE (val) == REAL_CST
2487 || TREE_CODE (val) == FIXED_CST)
2488 vec[i] = val;
2489 else
2490 return NULL_TREE;
2493 return build_vector (TREE_TYPE (rhs), vec);
2496 if (kind == tcc_reference)
2498 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2499 || TREE_CODE (rhs) == REALPART_EXPR
2500 || TREE_CODE (rhs) == IMAGPART_EXPR)
2501 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2503 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2504 return fold_unary_loc (EXPR_LOCATION (rhs),
2505 TREE_CODE (rhs),
2506 TREE_TYPE (rhs), val);
2508 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2509 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2511 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2512 return fold_ternary_loc (EXPR_LOCATION (rhs),
2513 TREE_CODE (rhs),
2514 TREE_TYPE (rhs), val,
2515 TREE_OPERAND (rhs, 1),
2516 TREE_OPERAND (rhs, 2));
2518 else if (TREE_CODE (rhs) == MEM_REF
2519 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2521 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2522 if (TREE_CODE (val) == ADDR_EXPR
2523 && is_gimple_min_invariant (val))
2525 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2526 unshare_expr (val),
2527 TREE_OPERAND (rhs, 1));
2528 if (tem)
2529 rhs = tem;
/* Fall through to the generic constant-aggregate lookup.  */
2532 return fold_const_aggregate_ref_1 (rhs, valueize);
2534 else if (kind == tcc_declaration)
2535 return get_symbol_constant_value (rhs);
2536 return rhs;
2539 case GIMPLE_UNARY_RHS:
2541 /* Handle unary operators that can appear in GIMPLE form.
2542 Note that we know the single operand must be a constant,
2543 so this should almost always return a simplified RHS. */
2544 tree lhs = gimple_assign_lhs (stmt);
2545 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2547 /* Conversions are useless for CCP purposes if they are
2548 value-preserving. Thus the restrictions that
2549 useless_type_conversion_p places for restrict qualification
2550 of pointer types should not apply here.
2551 Substitution later will only substitute to allowed places. */
2552 if (CONVERT_EXPR_CODE_P (subcode)
2553 && POINTER_TYPE_P (TREE_TYPE (lhs))
2554 && POINTER_TYPE_P (TREE_TYPE (op0))
2555 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2556 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2557 && TYPE_MODE (TREE_TYPE (lhs))
2558 == TYPE_MODE (TREE_TYPE (op0)))
2559 return op0;
2561 return
2562 fold_unary_ignore_overflow_loc (loc, subcode,
2563 gimple_expr_type (stmt), op0);
2566 case GIMPLE_BINARY_RHS:
2568 /* Handle binary operators that can appear in GIMPLE form. */
2569 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2570 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2572 /* Translate &x + CST into an invariant form suitable for
2573 further propagation. */
2574 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2575 && TREE_CODE (op0) == ADDR_EXPR
2576 && TREE_CODE (op1) == INTEGER_CST)
2578 tree off = fold_convert (ptr_type_node, op1);
2579 return build_fold_addr_expr_loc
2580 (loc,
2581 fold_build2 (MEM_REF,
2582 TREE_TYPE (TREE_TYPE (op0)),
2583 unshare_expr (op0), off));
2586 return fold_binary_loc (loc, subcode,
2587 gimple_expr_type (stmt), op0, op1);
2590 case GIMPLE_TERNARY_RHS:
2592 /* Handle ternary operators that can appear in GIMPLE form. */
2593 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2594 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2595 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2597 /* Fold embedded expressions in ternary codes. */
2598 if ((subcode == COND_EXPR
2599 || subcode == VEC_COND_EXPR)
2600 && COMPARISON_CLASS_P (op0))
2602 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2603 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2604 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2605 TREE_TYPE (op0), op00, op01);
2606 if (tem)
2607 op0 = tem;
2610 return fold_ternary_loc (loc, subcode,
2611 gimple_expr_type (stmt), op0, op1, op2);
2614 default:
2615 gcc_unreachable ();
/* Calls: only built-in functions with fully valueized arguments are
   folded (via fold_call_expr on a rebuilt CALL_EXPR).  */
2619 case GIMPLE_CALL:
2621 tree fn;
2623 if (gimple_call_internal_p (stmt))
2624 /* No folding yet for these functions. */
2625 return NULL_TREE;
2627 fn = (*valueize) (gimple_call_fn (stmt));
2628 if (TREE_CODE (fn) == ADDR_EXPR
2629 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2630 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2632 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2633 tree call, retval;
2634 unsigned i;
2635 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2636 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2637 call = build_call_array_loc (loc,
2638 gimple_call_return_type (stmt),
2639 fn, gimple_call_num_args (stmt), args);
2640 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2641 if (retval)
2642 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2643 STRIP_NOPS (retval);
2644 return retval;
2646 return NULL_TREE;
2649 default:
2650 return NULL_TREE;
2654 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2655 Returns NULL_TREE if folding to a constant is not possible, otherwise
2656 returns a constant according to is_gimple_min_invariant. */
2658 tree
2659 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2661 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2662 if (res && is_gimple_min_invariant (res))
2663 return res;
2664 return NULL_TREE;
2668 /* The following set of functions are supposed to fold references using
2669 their constant initializers. */
2671 static tree fold_ctor_reference (tree type, tree ctor,
2672 unsigned HOST_WIDE_INT offset,
2673 unsigned HOST_WIDE_INT size, tree);
2675 /* See if we can find constructor defining value of BASE.
2676 When we know the constructor with constant offset (such as
2677 base is array[40] and we do know constructor of array), then
2678 BIT_OFFSET is adjusted accordingly.
2680 As a special case, return error_mark_node when constructor
2681 is not explicitly available, but it is known to be zero
2682 such as 'static const int a;'. */
2683 static tree
2684 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2685 tree (*valueize)(tree))
2687 HOST_WIDE_INT bit_offset2, size, max_size;
/* Strip a MEM_REF wrapper: fold its constant byte offset into
   *BIT_OFFSET and look through the (possibly valueized) address.  */
2688 if (TREE_CODE (base) == MEM_REF)
2690 if (!integer_zerop (TREE_OPERAND (base, 1)))
2692 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2693 return NULL_TREE;
2694 *bit_offset += (mem_ref_offset (base).low
2695 * BITS_PER_UNIT);
2698 if (valueize
2699 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2700 base = valueize (TREE_OPERAND (base, 0));
2701 if (!base || TREE_CODE (base) != ADDR_EXPR)
2702 return NULL_TREE;
2703 base = TREE_OPERAND (base, 0);
2706 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2707 DECL_INITIAL. If BASE is a nested reference into another
2708 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2709 the inner reference. */
2710 switch (TREE_CODE (base))
2712 case VAR_DECL:
2713 case CONST_DECL:
2715 tree init = ctor_for_folding (base);
2717 /* Our semantic is exact opposite of ctor_for_folding;
2718 NULL means unknown, while error_mark_node is 0. */
2719 if (init == error_mark_node)
2720 return NULL_TREE;
2721 if (!init)
2722 return error_mark_node;
2723 return init;
2726 case ARRAY_REF:
2727 case COMPONENT_REF:
/* Only constant-position references can be resolved; a variable
   or unknown extent makes the offset unusable.  */
2728 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2729 if (max_size == -1 || size != max_size)
2730 return NULL_TREE;
2731 *bit_offset += bit_offset2;
2732 return get_base_constructor (base, bit_offset, valueize);
2734 case STRING_CST:
2735 case CONSTRUCTOR:
2736 return base;
2738 default:
2739 return NULL_TREE;
2743 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2744 to the memory at bit OFFSET.
2746 We do only simple job of folding byte accesses. */
2748 static tree
2749 fold_string_cst_ctor_reference (tree type, tree ctor,
2750 unsigned HOST_WIDE_INT offset,
2751 unsigned HOST_WIDE_INT size)
2753 if (INTEGRAL_TYPE_P (type)
2754 && (TYPE_MODE (type)
2755 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2756 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2757 == MODE_INT)
2758 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2759 && size == BITS_PER_UNIT
2760 && !(offset % BITS_PER_UNIT))
2762 offset /= BITS_PER_UNIT;
2763 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2764 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2765 [offset]));
2766 /* Folding
2767 const char a[20]="hello";
2768 return a[10];
2770 might lead to offset greater than string length. In this case we
2771 know value is either initialized to 0 or out of bounds. Return 0
2772 in both cases. */
2773 return build_zero_cst (type);
2775 return NULL_TREE;
2778 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2779 SIZE to the memory at bit OFFSET. */
2781 static tree
2782 fold_array_ctor_reference (tree type, tree ctor,
2783 unsigned HOST_WIDE_INT offset,
2784 unsigned HOST_WIDE_INT size,
2785 tree from_decl)
2787 unsigned HOST_WIDE_INT cnt;
2788 tree cfield, cval;
2789 double_int low_bound, elt_size;
2790 double_int index, max_index;
2791 double_int access_index;
2792 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2793 HOST_WIDE_INT inner_offset;
2795 /* Compute low bound and elt size. */
2796 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2797 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2798 if (domain_type && TYPE_MIN_VALUE (domain_type))
2800 /* Static constructors for variably sized objects makes no sense. */
2801 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST)
2802 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2803 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2805 else
2806 low_bound = double_int_zero;
2807 /* Static constructors for variably sized objects makes no sense. */
2808 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2809 == INTEGER_CST);
2810 elt_size =
2811 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2814 /* We can handle only constantly sized accesses that are known to not
2815 be larger than size of array element. */
2816 if (!TYPE_SIZE_UNIT (type)
2817 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2818 || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
2819 return NULL_TREE;
2821 /* Compute the array index we look for. */
2822 access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
2823 .udiv (elt_size, TRUNC_DIV_EXPR);
2824 access_index += low_bound;
2825 if (index_type)
2826 access_index = access_index.ext (TYPE_PRECISION (index_type),
2827 TYPE_UNSIGNED (index_type));
2829 /* And offset within the access. */
2830 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2832 /* See if the array field is large enough to span whole access. We do not
2833 care to fold accesses spanning multiple array indexes. */
2834 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
2835 return NULL_TREE;
/* Start the running index one below the low bound so the first
   implicit (NULL-index) constructor entry increments to LOW_BOUND.  */
2837 index = low_bound - double_int_one;
2838 if (index_type)
2839 index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));
2841 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2843 /* Array constructor might explicitly set index, or specify range
2844 or leave index NULL meaning that it is next index after previous
2845 one. */
2846 if (cfield)
2848 if (TREE_CODE (cfield) == INTEGER_CST)
2849 max_index = index = tree_to_double_int (cfield);
2850 else
2852 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2853 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2854 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2857 else
2859 index += double_int_one;
2860 if (index_type)
2861 index = index.ext (TYPE_PRECISION (index_type),
2862 TYPE_UNSIGNED (index_type));
2863 max_index = index;
2866 /* Do we have match? */
2867 if (access_index.cmp (index, 1) >= 0
2868 && access_index.cmp (max_index, 1) <= 0)
2869 return fold_ctor_reference (type, cval, inner_offset, size,
2870 from_decl);
2872 /* When memory is not explicitly mentioned in constructor,
2873 it is 0 (or out of range). */
2874 return build_zero_cst (type);
2877 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2878 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2881 static tree
2882 fold_nonarray_ctor_reference (tree type, tree ctor,
2883 unsigned HOST_WIDE_INT offset,
2884 unsigned HOST_WIDE_INT size,
2885 tree from_decl)
2886 unsigned HOST_WIDE_INT cnt;
2887 tree cfield, cval;
2889 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2890 cval)
2892 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2893 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2894 tree field_size = DECL_SIZE (cfield);
2895 double_int bitoffset;
2896 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2897 double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
2898 double_int bitoffset_end, access_end;
2900 /* Variable sized objects in static constructors makes no sense,
2901 but field_size can be NULL for flexible array members. */
2902 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2903 && TREE_CODE (byte_offset) == INTEGER_CST
2904 && (field_size != NULL_TREE
2905 ? TREE_CODE (field_size) == INTEGER_CST
2906 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2908 /* Compute bit offset of the field. */
2909 bitoffset = tree_to_double_int (field_offset)
2910 + byte_offset_cst * bits_per_unit_cst;
2911 /* Compute bit offset where the field ends. */
2912 if (field_size != NULL_TREE)
2913 bitoffset_end = bitoffset + tree_to_double_int (field_size);
2914 else
2915 bitoffset_end = double_int_zero;
2917 access_end = double_int::from_uhwi (offset)
2918 + double_int::from_uhwi (size);
2920 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2921 [BITOFFSET, BITOFFSET_END)? */
2922 if (access_end.cmp (bitoffset, 0) > 0
2923 && (field_size == NULL_TREE
2924 || double_int::from_uhwi (offset).slt (bitoffset_end)))
2926 double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
2927 /* We do have overlap. Now see if field is large enough to
2928 cover the access. Give up for accesses spanning multiple
2929 fields. */
2930 if (access_end.cmp (bitoffset_end, 0) > 0)
2931 return NULL_TREE;
2932 if (double_int::from_uhwi (offset).slt (bitoffset))
2933 return NULL_TREE;
/* Recurse into the matching field's value with the offset rebased
   to the start of that field.  */
2934 return fold_ctor_reference (type, cval,
2935 inner_offset.to_uhwi (), size,
2936 from_decl);
2939 /* When memory is not explicitly mentioned in constructor, it is 0. */
2940 return build_zero_cst (type);
2943 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2944 to the memory at bit OFFSET. */
2946 static tree
2947 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2948 unsigned HOST_WIDE_INT size, tree from_decl)
2950 tree ret;
2952 /* We found the field with exact match. */
2953 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2954 && !offset)
2955 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
2957 /* We are at the end of walk, see if we can view convert the
2958 result. */
2959 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2960 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2961 && operand_equal_p (TYPE_SIZE (type),
2962 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2964 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
2965 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2966 if (ret)
2967 STRIP_NOPS (ret);
2968 return ret;
/* Otherwise dispatch on the kind of initializer: STRING_CSTs and
   CONSTRUCTORs (array/vector vs. record/union) have specialized
   helpers; anything else cannot be folded.  */
2970 if (TREE_CODE (ctor) == STRING_CST)
2971 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2972 if (TREE_CODE (ctor) == CONSTRUCTOR)
2975 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2976 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2977 return fold_array_ctor_reference (type, ctor, offset, size,
2978 from_decl);
2979 else
2980 return fold_nonarray_ctor_reference (type, ctor, offset, size,
2981 from_decl);
2984 return NULL_TREE;
2987 /* Return the tree representing the element referenced by T if T is an
2988 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
2989 names using VALUEIZE. Return NULL_TREE otherwise. */
2991 tree
2992 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2994 tree ctor, idx, base;
2995 HOST_WIDE_INT offset, size, max_size;
2996 tree tem;
/* Never fold away a volatile access.  */
2998 if (TREE_THIS_VOLATILE (t))
2999 return NULL_TREE;
3001 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3002 return get_symbol_constant_value (t);
3004 tem = fold_read_from_constant_string (t);
3005 if (tem)
3006 return tem;
3008 switch (TREE_CODE (t))
3010 case ARRAY_REF:
3011 case ARRAY_RANGE_REF:
3012 /* Constant indexes are handled well by get_base_constructor.
3013 Only special case variable offsets.
3014 FIXME: This code can't handle nested references with variable indexes
3015 (they will be handled only by iteration of ccp). Perhaps we can bring
3016 get_ref_base_and_extent here and make it use a valueize callback. */
3017 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3018 && valueize
3019 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3020 && TREE_CODE (idx) == INTEGER_CST)
3022 tree low_bound, unit_size;
3023 double_int doffset;
3025 /* If the resulting bit-offset is constant, track it. */
3026 if ((low_bound = array_ref_low_bound (t),
3027 TREE_CODE (low_bound) == INTEGER_CST)
3028 && (unit_size = array_ref_element_size (t),
3029 host_integerp (unit_size, 1))
3030 && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
3031 .sext (TYPE_PRECISION (TREE_TYPE (idx))),
3032 doffset.fits_shwi ()))
3034 offset = doffset.to_shwi ();
3035 offset *= TREE_INT_CST_LOW (unit_size);
3036 offset *= BITS_PER_UNIT;
3038 base = TREE_OPERAND (t, 0);
3039 ctor = get_base_constructor (base, &offset, valueize);
3040 /* Empty constructor. Always fold to 0. */
3041 if (ctor == error_mark_node)
3042 return build_zero_cst (TREE_TYPE (t));
3043 /* Out of bound array access. Value is undefined,
3044 but don't fold. */
3045 if (offset < 0)
3046 return NULL_TREE;
3047 /* We can not determine ctor. */
3048 if (!ctor)
3049 return NULL_TREE;
3050 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3051 TREE_INT_CST_LOW (unit_size)
3052 * BITS_PER_UNIT,
3053 base);
/* Variable-index path did not apply: handle like any other
   reference via get_ref_base_and_extent below.  */
3056 /* Fallthru. */
3058 case COMPONENT_REF:
3059 case BIT_FIELD_REF:
3060 case TARGET_MEM_REF:
3061 case MEM_REF:
3062 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3063 ctor = get_base_constructor (base, &offset, valueize);
3065 /* Empty constructor. Always fold to 0. */
3066 if (ctor == error_mark_node)
3067 return build_zero_cst (TREE_TYPE (t));
3068 /* We do not know precise address. */
3069 if (max_size == -1 || max_size != size)
3070 return NULL_TREE;
3071 /* We can not determine ctor. */
3072 if (!ctor)
3073 return NULL_TREE;
3075 /* Out of bound array access. Value is undefined, but don't fold. */
3076 if (offset < 0)
3077 return NULL_TREE;
3079 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
3080 base);
3082 case REALPART_EXPR:
3083 case IMAGPART_EXPR:
3085 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3086 if (c && TREE_CODE (c) == COMPLEX_CST)
3087 return fold_build1_loc (EXPR_LOCATION (t),
3088 TREE_CODE (t), TREE_TYPE (t), c);
3089 break;
3092 default:
3093 break;
3096 return NULL_TREE;
3099 tree
3100 fold_const_aggregate_ref (tree t)
3102 return fold_const_aggregate_ref_1 (t, NULL);
3105 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3106 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3107 KNOWN_BINFO carries the binfo describing the true type of
3108 OBJ_TYPE_REF_OBJECT(REF). */
3110 tree
3111 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3113 unsigned HOST_WIDE_INT offset, size;
3114 tree v, fn, vtable, init;
3116 vtable = v = BINFO_VTABLE (known_binfo);
3117 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3118 if (!v)
3119 return NULL_TREE;
/* BINFO_VTABLE may be &vtable or &vtable + byte offset; normalize to
   a VAR_DECL plus a bit OFFSET.  */
3121 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3123 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3124 v = TREE_OPERAND (v, 0);
3126 else
3127 offset = 0;
3129 if (TREE_CODE (v) != ADDR_EXPR)
3130 return NULL_TREE;
3131 v = TREE_OPERAND (v, 0);
3133 if (TREE_CODE (v) != VAR_DECL
3134 || !DECL_VIRTUAL_P (v))
3135 return NULL_TREE;
3136 init = ctor_for_folding (v);
3138 /* The virtual tables should always be born with constructors
3139 and we always should assume that they are available for
3140 folding. At the moment we do not stream them in all cases,
3141 but it should never happen that ctor seems unreachable. */
3142 gcc_assert (init);
3143 if (init == error_mark_node)
3145 gcc_assert (in_lto_p);
3146 return NULL_TREE;
/* Index into the vtable array: TOKEN slots of SIZE bits each past
   the base OFFSET.  */
3148 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3149 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3150 offset += token * size;
3151 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
3152 offset, size, v);
3153 if (!fn || integer_zerop (fn))
3154 return NULL_TREE;
3155 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3156 || TREE_CODE (fn) == FDESC_EXPR)
3157 fn = TREE_OPERAND (fn, 0);
3158 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3160 /* When cgraph node is missing and function is not public, we cannot
3161 devirtualize. This can happen in WHOPR when the actual method
3162 ends up in other partition, because we found devirtualization
3163 possibility too late. */
3164 if (!can_refer_decl_in_current_unit_p (fn, vtable))
3165 return NULL_TREE;
3167 /* Make sure we create a cgraph node for functions we'll reference.
3168 They can be non-existent if the reference comes from an entry
3169 of an external vtable for example. */
3170 cgraph_get_create_node (fn);
3172 return fn;
3175 /* Return true iff VAL is a gimple expression that is known to be
3176 non-negative. Restricted to floating-point inputs. */
3178 bool
3179 gimple_val_nonnegative_real_p (tree val)
3181 gimple def_stmt;
3183 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3185 /* Use existing logic for non-gimple trees. */
3186 if (tree_expr_nonnegative_p (val))
3187 return true;
3189 if (TREE_CODE (val) != SSA_NAME)
3190 return false;
3192 /* Currently we look only at the immediately defining statement
3193 to make this determination, since recursion on defining
3194 statements of operands can lead to quadratic behavior in the
3195 worst case. This is expected to catch almost all occurrences
3196 in practice. It would be possible to implement limited-depth
3197 recursion if important cases are lost. Alternatively, passes
3198 that need this information (such as the pow/powi lowering code
3199 in the cse_sincos pass) could be revised to provide it through
3200 dataflow propagation. */
3202 def_stmt = SSA_NAME_DEF_STMT (val);
3204 if (is_gimple_assign (def_stmt))
3206 tree op0, op1;
3208 /* See fold-const.c:tree_expr_nonnegative_p for additional
3209 cases that could be handled with recursion. */
3211 switch (gimple_assign_rhs_code (def_stmt))
3213 case ABS_EXPR:
3214 /* Always true for floating-point operands. */
3215 return true;
3217 case MULT_EXPR:
3218 /* True if the two operands are identical (since we are
3219 restricted to floating-point inputs). */
3220 op0 = gimple_assign_rhs1 (def_stmt);
3221 op1 = gimple_assign_rhs2 (def_stmt);
3223 if (op0 == op1
3224 || operand_equal_p (op0, op1, 0))
3225 return true;
3227 default:
3228 return false;
3231 else if (is_gimple_call (def_stmt))
3233 tree fndecl = gimple_call_fndecl (def_stmt);
3234 if (fndecl
3235 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3237 tree arg1;
3239 switch (DECL_FUNCTION_CODE (fndecl))
3241 CASE_FLT_FN (BUILT_IN_ACOS):
3242 CASE_FLT_FN (BUILT_IN_ACOSH):
3243 CASE_FLT_FN (BUILT_IN_CABS):
3244 CASE_FLT_FN (BUILT_IN_COSH):
3245 CASE_FLT_FN (BUILT_IN_ERFC):
3246 CASE_FLT_FN (BUILT_IN_EXP):
3247 CASE_FLT_FN (BUILT_IN_EXP10):
3248 CASE_FLT_FN (BUILT_IN_EXP2):
3249 CASE_FLT_FN (BUILT_IN_FABS):
3250 CASE_FLT_FN (BUILT_IN_FDIM):
3251 CASE_FLT_FN (BUILT_IN_HYPOT):
3252 CASE_FLT_FN (BUILT_IN_POW10):
3253 return true;
3255 CASE_FLT_FN (BUILT_IN_SQRT):
3256 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3257 nonnegative inputs. */
3258 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3259 return true;
3261 break;
3263 CASE_FLT_FN (BUILT_IN_POWI):
3264 /* True if the second argument is an even integer. */
3265 arg1 = gimple_call_arg (def_stmt, 1);
3267 if (TREE_CODE (arg1) == INTEGER_CST
3268 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3269 return true;
3271 break;
3273 CASE_FLT_FN (BUILT_IN_POW):
3274 /* True if the second argument is an even integer-valued
3275 real. */
3276 arg1 = gimple_call_arg (def_stmt, 1);
3278 if (TREE_CODE (arg1) == REAL_CST)
3280 REAL_VALUE_TYPE c;
3281 HOST_WIDE_INT n;
3283 c = TREE_REAL_CST (arg1);
3284 n = real_to_integer (&c);
3286 if ((n & 1) == 0)
3288 REAL_VALUE_TYPE cint;
3289 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3290 if (real_identical (&c, &cint))
3291 return true;
3295 break;
3297 default:
3298 return false;
3303 return false;