/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2014 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stringpool.h"
#include "expr.h"
#include "stmt.h"
#include "stor-layout.h"
#include "flags.h"
#include "function.h"
#include "dumpfile.h"
#include "bitmap.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) is the variable from whose constructor DECL
   was taken.  We can get declarations that are not possible to reference
   for various reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to other compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have a static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this time we have already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      snode = symtab_get_node (decl);
      if (!snode)
        return false;
      node = dyn_cast <cgraph_node> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || !DECL_EXTERNAL (from_decl)
      || (flag_ltrans
          && symtab_get_node (from_decl)->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to other compilation unit.  The other
     compilation unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we always can introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in units where they are used, and when the other unit was
     compiled with LTO it is possible that the vtable was kept public while
     the function itself was privatized.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;

  /* OK, we are seeing either a COMDAT or a static variable.  In this case
     we must check that the definition is still around so we can refer to
     it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we haven't yet
         taken the decision to eliminate the offline copy of the function.
         The second is important when devirtualization happens during the
         final compilation stage, when making a new reference no longer
         makes the callee be compiled.  */
      if (!node || !node->definition || node->global.inlined_to)
        {
          gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
          return false;
        }
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->definition)
        {
          gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
          return false;
        }
    }
  return true;
}

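/* For illustration, a sketch of case 3 above (hypothetical C++ input,
   not code from this file):

     // unit1.cc: keyed unit, emits the vtable referring to A::f.
     struct A { virtual int f () { return 0; } };   // A::f is COMDAT

   When another unit devirtualizes through A's external vtable during the
   final compilation stage, it may already have decided not to output a
   local copy of A::f; the function above then refuses the direct
   reference.  */
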
/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into a form acceptable to is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
           || TREE_CODE (base) == FUNCTION_DECL)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_get_create_node (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}

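/* For illustration, a sketch of the canonicalization above: an
   initializer value such as

     &arr p+ 4                    (a POINTER_PLUS_EXPR)

   is rewritten into roughly

     &MEM[&arr + 4]               (an ADDR_EXPR of a MEM_REF)

   which is the form is_gimple_min_invariant accepts.  */
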
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}

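/* For illustration (hypothetical input, not part of this file):

     static const int answer = 42;
     int f (void) { return answer; }

   get_symbol_constant_value (answer) yields the INTEGER_CST 42, so the
   load in f can be replaced by the constant.  A 'const'-qualified
   integer declared without an initializer that cannot be overridden
   likewise folds to zero, per the comment above.  */
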
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
                              TREE_OPERAND (*t, 0),
                              TREE_OPERAND (*t, 1));
      if (tem)
        {
          TREE_THIS_VOLATILE (tem) = volatile_p;
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
          == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
          == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
         from the rhs to the lhs if is_lhs, but we don't have the
         rhs here to verify that.  Thus require strict type
         compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
                             TREE_TYPE (TREE_OPERAND
                                          (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
        return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
        {
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  return NULL_TREE;
}

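/* For illustration, two kinds of input the routine above can fold
   (sketched, hypothetical GIMPLE):

     MEM[&a + 0]   becomes the plain reference  a
       (given the volatility and type-compatibility checks above), and

     __real__ C   for a complex constant C   becomes its real part.  */
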
/* Attempt to fold an assignment statement pointed to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (val))
              {
                bool final;
                vec <cgraph_node *> targets
                  = possible_polymorphic_call_targets (val, &final);
                if (final && targets.length () <= 1)
                  {
                    tree fndecl;
                    if (targets.length () == 1)
                      fndecl = targets[0]->decl;
                    else
                      fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
                    val = fold_convert (TREE_TYPE (val), fndecl);
                    STRIP_USELESS_TYPE_CONVERSION (val);
                    return val;
                  }
              }
          }
        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
          }
        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }
        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
         X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
          || gimple_assign_rhs_code (stmt) == NE_EXPR)
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree op1 = gimple_assign_rhs1 (stmt);
          tree op2 = gimple_assign_rhs2 (stmt);
          tree type = TREE_TYPE (op1);

          /* Check whether the comparison operands are of the same boolean
             type as the result type, and that the second operand is an
             integer constant with value one or zero.  */
          if (TREE_CODE (op2) == INTEGER_CST
              && (integer_zerop (op2) || integer_onep (op2))
              && useless_type_conversion_p (TREE_TYPE (lhs), type))
            {
              enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
              bool is_logical_not = false;

              /* X == 0 and X != 1 are a logical-not of X;
                 X == 1 and X != 0 are X itself.  */
              if ((cmp_code == EQ_EXPR && integer_zerop (op2))
                  || (cmp_code == NE_EXPR && integer_onep (op2)))
                is_logical_not = true;

              if (is_logical_not == false)
                result = op1;
              /* Only for X of one-bit precision is the transformation
                 !X -> ~X valid.  */
              else if (TYPE_PRECISION (type) == 1)
                result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
                                     type, op1);
              /* Otherwise we use !X -> X ^ 1.  */
              else
                result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
                                     type, op1, build_int_cst (type, 1));
            }
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
        {
          tree op0 = gimple_assign_rhs1 (stmt);
          tree tem;
          bool set = false;
          location_t cond_loc = gimple_location (stmt);

          if (COMPARISON_CLASS_P (op0))
            {
              fold_defer_overflow_warnings ();
              tem = fold_binary_loc (cond_loc,
                                     TREE_CODE (op0), TREE_TYPE (op0),
                                     TREE_OPERAND (op0, 0),
                                     TREE_OPERAND (op0, 1));
              /* This is actually a conditional expression, not a GIMPLE
                 conditional statement; however, the valid_gimple_rhs_p
                 test still applies.  */
              set = (tem && is_gimple_condexpr (tem)
                     && valid_gimple_rhs_p (tem));
              fold_undefer_overflow_warnings (set, stmt, 0);
            }
          else if (is_gimple_min_invariant (op0))
            {
              tem = op0;
              set = true;
            }
          else
            return NULL_TREE;

          if (set)
            result = fold_build3_loc (cond_loc, COND_EXPR,
                                      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
                                      gimple_assign_rhs2 (stmt),
                                      gimple_assign_rhs3 (stmt));
        }

      if (!result)
        result = fold_ternary_loc (loc, subcode,
                                   TREE_TYPE (gimple_assign_lhs (stmt)),
                                   gimple_assign_rhs1 (stmt),
                                   gimple_assign_rhs2 (stmt),
                                   gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

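/* For illustration, the boolean canonicalization in the binary case
   above rewrites, for a one-bit boolean X:

     tem = X != 0;   ->   tem = X;
     tem = X == 1;   ->   tem = X;
     tem = X == 0;   ->   tem = ~X;

   and uses X ^ 1 instead of ~X when the boolean type is wider than one
   bit.  */
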
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}

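/* For illustration (a sketch): after constant propagation a condition
   such as

     if (2 > 1)   ->   if (1 != 0)

   i.e. the folded constant is re-expressed as a valid GIMPLE condition
   without touching the cfg.  */
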
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), true);
          return;
        }
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
         SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

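/* For illustration, a sketch of the virtual-operand wiring above
   (hypothetical SSA dump): when a call carrying the virtual operands

     # .MEM_4 = VDEF <.MEM_3>
     memcpy (&a, &b, 8);

   is replaced by a two-store sequence, the result keeps .MEM_3 as the
   first VUSE and .MEM_4 as the last VDEF, with a fresh name in between:

     # .MEM_7 = VDEF <.MEM_3>
     a.x = b.x;
     # .MEM_4 = VDEF <.MEM_7>
     a.y = b.y;  */
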
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
        else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
          {
            tree op2 = gimple_assign_rhs2 (def_stmt);
            tree op3 = gimple_assign_rhs3 (def_stmt);
            return get_maxval_strlen (op2, length, visited, type)
                   && get_maxval_strlen (op3, length, visited, type);
          }
        return false;

      case GIMPLE_PHI:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
          unsigned i;

          for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
            {
              tree arg = gimple_phi_arg (def_stmt, i)->def;

              /* If this PHI has itself as an argument, we cannot
                 determine the string length of this argument.  However,
                 if we can find a constant string length for the other
                 PHI args then we can still be sure that this is a
                 constant string length.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == gimple_phi_result (def_stmt))
                continue;

              if (!get_maxval_strlen (arg, length, visited, type))
                return false;
            }
        }
        return true;

      default:
        return false;
    }
}

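/* For illustration (hypothetical GIMPLE): with TYPE == 0 and

     s_1 = PHI <&"foo"(bb2), &"bar"(bb3)>

   both arguments have string length 3, so *LENGTH becomes 3 and true is
   returned.  Arguments of lengths 3 and 4 would fail the
   simple_cst_equal check above and yield false, though with TYPE == 1
   the maximum, 4, would be recorded instead.  */
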
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      else
        result = fold_convert (gimple_call_return_type (stmt), result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRCAT:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val
            = fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (CONVERT_EXPR_P (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (loc, callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (loc, callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_STRCAT:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcat (loc, gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2], ignore,
                                           DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}

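/* For illustration (hypothetical input): given

     strcpy (dst, "abc");

   get_maxval_strlen computes val[1] = 3, and fold_builtin_strcpy can
   then replace the call by memcpy (dst, "abc", 4), copying the trailing
   NUL as part of the now-fixed length.  */
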
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
        tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
        if (tmp)
          {
            gimple_call_set_arg (stmt, i, tmp);
            changed = true;
          }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
        {
          if (dump_file && virtual_method_call_p (callee)
              && !possible_polymorphic_call_target_p
                    (callee, cgraph_get_node (gimple_call_addr_fndecl
                                                (OBJ_TYPE_REF_EXPR (callee)))))
            {
              fprintf (dump_file,
                       "Type inheritance inconsistent devirtualization of ");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, callee, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
          changed = true;
        }
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
        {
          bool final;
          vec <cgraph_node *> targets
            = possible_polymorphic_call_targets (callee, &final);
          if (final && targets.length () <= 1)
            {
              tree lhs = gimple_call_lhs (stmt);
              if (targets.length () == 1)
                {
                  gimple_call_set_fndecl (stmt, targets[0]->decl);
                  changed = true;
                  /* If the call becomes noreturn, remove the lhs.  */
                  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
                    {
                      if (TREE_CODE (lhs) == SSA_NAME)
                        {
                          tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
                          tree def = get_or_create_ssa_default_def (cfun, var);
                          gimple new_stmt = gimple_build_assign (lhs, def);
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      gimple_call_set_lhs (stmt, NULL_TREE);
                    }
                }
              else
                {
                  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
                  gimple new_stmt = gimple_build_call (fndecl, 0);
                  gimple_set_location (new_stmt, gimple_location (stmt));
                  if (lhs && TREE_CODE (lhs) == SSA_NAME)
                    {
                      tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
                      tree def = get_or_create_ssa_default_def (cfun, var);

                      /* To satisfy the condition for
                         cgraph_update_edges_for_call_stmt_node,
                         we need to preserve the GIMPLE_CALL statement
                         at the position of the GSI iterator.  */
                      update_call_from_tree (gsi, def);
                      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
                    }
                  else
                    gsi_replace (gsi, new_stmt, true);
                  return true;
                }
            }
        }
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
        {
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          changed = true;
        }
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    changed |= targetm.gimple_fold_builtin (gsi);
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
        {
        case IFN_BUILTIN_EXPECT:
          result = fold_builtin_expect (gimple_location (stmt),
                                        gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1),
                                        gimple_call_arg (stmt, 2));
          break;
        case IFN_UBSAN_CHECK_ADD:
          subcode = PLUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_SUB:
          subcode = MINUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_MUL:
          subcode = MULT_EXPR;
          break;
        default:
          break;
        }
      if (subcode != ERROR_MARK)
        {
          tree arg0 = gimple_call_arg (stmt, 0);
          tree arg1 = gimple_call_arg (stmt, 1);
          /* x = y + 0; x = y - 0; x = y * 0; */
          if (integer_zerop (arg1))
            result = subcode == MULT_EXPR
                     ? build_zero_cst (TREE_TYPE (arg0))
                     : arg0;
          /* x = 0 + y; x = 0 * y; */
          else if (subcode != MINUS_EXPR && integer_zerop (arg0))
            result = subcode == MULT_EXPR
                     ? build_zero_cst (TREE_TYPE (arg0))
                     : arg1;
          /* x = y - y; */
          else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
            result = build_zero_cst (TREE_TYPE (arg0));
          /* x = y * 1; x = 1 * y; */
          else if (subcode == MULT_EXPR)
            {
              if (integer_onep (arg1))
                result = arg0;
              else if (integer_onep (arg0))
                result = arg1;
            }
        }
      if (result)
        {
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          changed = true;
        }
    }

  return changed;
}

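/* For illustration, the internal-function folding above performs, e.g.:

     x = UBSAN_CHECK_ADD (y, 0);   ->   x = y;
     x = UBSAN_CHECK_MUL (y, 1);   ->   x = y;
     x = UBSAN_CHECK_SUB (y, y);   ->   x = 0;

   since no overflow check is needed for these operand combinations.  */
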
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes the two cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops = gimple_num_ops (stmt);
        enum tree_code subcode = gimple_assign_rhs_code (stmt);
        tree lhs = gimple_assign_lhs (stmt);
        tree new_rhs;
        /* First canonicalize operand order.  This avoids building new
           trees if this is the only thing fold would later do.  */
        if ((commutative_tree_code (subcode)
             || commutative_ternary_tree_code (subcode))
            && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                     gimple_assign_rhs2 (stmt), false))
          {
            tree tem = gimple_assign_rhs1 (stmt);
            gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
            gimple_assign_set_rhs2 (stmt, tem);
            changed = true;
          }
        new_rhs = fold_gimple_assign (gsi);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
        size_t noutputs;
        const char **oconstraints;
        const char *constraint;
        bool allows_mem, allows_reg;

        noutputs = gimple_asm_noutputs (stmt);
        oconstraints = XALLOCAVEC (const char *, noutputs);

        for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
          {
            tree link = gimple_asm_output_op (stmt, i);
            tree op = TREE_VALUE (link);
            oconstraints[i]
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, true)) != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
        for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
          {
            tree link = gimple_asm_input_op (stmt, i);
            tree op = TREE_VALUE (link);
            constraint
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
                   != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree val = gimple_debug_bind_get_value (stmt);
          if (val
              && REFERENCE_CLASS_P (val))
            {
              tree tem = maybe_fold_reference (val, false);
              if (tem)
                {
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
          else if (val
                   && TREE_CODE (val) == ADDR_EXPR)
            {
              tree ref = TREE_OPERAND (val, 0);
              tree tem = maybe_fold_reference (ref, false);
              if (tem)
                {
                  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
        }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  return changed;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}

/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
        return expr;
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (TREE_CODE (expr),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
}

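/* For illustration, a sketch of what canonicalize_bool produces:

     EXPR           INVERT = false        INVERT = true
     1              boolean_true_node     boolean_false_node
     name_1         name_1 != 0           name_1 == 0
     a < b          a < b (unchanged)     a >= b

   so a non-null result always has boolean type, as the comment above
   states.  */
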
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
        return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
        {
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
        }
    }
  return false;
}

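/* For illustration (hypothetical GIMPLE): given the definition

     t_1 = a_2 != b_3;

   same_bool_comparison_p (t_1, NE_EXPR, a_2, b_3) is true via the
   SSA_NAME definition check above, and
   same_bool_comparison_p (t_1, NE_EXPR, t_1, 0) is true via the
   (name, name != 0) identity.  */
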
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
                                 TREE_OPERAND (op2, 0),
                                 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
                                 TREE_OPERAND (op1, 0),
                                 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}

/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b);

/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
                                  invert_tree_comparison (code2, false),
                                  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}

/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2,
                                  op2a,
                                  op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
           inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
           inner1 AND (inner1 OR inner2) => inner1
           !inner1 AND (inner1 AND inner2) => false
           !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b)).  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (t AND inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner2;
              else if (integer_zerop (t))
                return boolean_false_node;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b)))  */
          else if (integer_onep (t))
            return boolean_true_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 AND (op2a code op2b)).  */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t)  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner1;
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
                 (x AND x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial)  */
          else
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                     (x OR x) == x.  */
                  if (integer_zerop (partial))
                    return t;
                  else if (integer_zerop (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}

1774 /* Try to simplify the AND of two comparisons defined by
1775 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1776 If this can be done without constructing an intermediate value,
1777 return the resulting tree; otherwise NULL_TREE is returned.
1778 This function is deliberately asymmetric as it recurses on SSA_DEFs
1779 in the first comparison but not the second. */
1781 static tree
1782 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1783 enum tree_code code2, tree op2a, tree op2b)
1785 tree truth_type = truth_type_for (TREE_TYPE (op1a));
1787 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1788 if (operand_equal_p (op1a, op2a, 0)
1789 && operand_equal_p (op1b, op2b, 0))
1791 /* Result will be either NULL_TREE, or a combined comparison. */
1792 tree t = combine_comparisons (UNKNOWN_LOCATION,
1793 TRUTH_ANDIF_EXPR, code1, code2,
1794 truth_type, op1a, op1b);
1795 if (t)
1796 return t;
1799 /* Likewise the swapped case of the above. */
1800 if (operand_equal_p (op1a, op2b, 0)
1801 && operand_equal_p (op1b, op2a, 0))
1803 /* Result will be either NULL_TREE, or a combined comparison. */
1804 tree t = combine_comparisons (UNKNOWN_LOCATION,
1805 TRUTH_ANDIF_EXPR, code1,
1806 swap_tree_comparison (code2),
1807 truth_type, op1a, op1b);
1808 if (t)
1809 return t;
1812 /* If both comparisons are of the same value against constants, we might
1813 be able to merge them. */
1814 if (operand_equal_p (op1a, op2a, 0)
1815 && TREE_CODE (op1b) == INTEGER_CST
1816 && TREE_CODE (op2b) == INTEGER_CST)
1818 int cmp = tree_int_cst_compare (op1b, op2b);
1820 /* If we have (op1a == op1b), we should either be able to
1821 return that or FALSE, depending on whether the constant op1b
1822 also satisfies the other comparison against op2b. */
1823 if (code1 == EQ_EXPR)
1825 bool done = true;
1826 bool val;
1827 switch (code2)
1829 case EQ_EXPR: val = (cmp == 0); break;
1830 case NE_EXPR: val = (cmp != 0); break;
1831 case LT_EXPR: val = (cmp < 0); break;
1832 case GT_EXPR: val = (cmp > 0); break;
1833 case LE_EXPR: val = (cmp <= 0); break;
1834 case GE_EXPR: val = (cmp >= 0); break;
1835 default: done = false;
1837 if (done)
1839 if (val)
1840 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1841 else
1842 return boolean_false_node;
1845 /* Likewise if the second comparison is an == comparison. */
1846 else if (code2 == EQ_EXPR)
1848 bool done = true;
1849 bool val;
1850 switch (code1)
1852 case EQ_EXPR: val = (cmp == 0); break;
1853 case NE_EXPR: val = (cmp != 0); break;
1854 case LT_EXPR: val = (cmp > 0); break;
1855 case GT_EXPR: val = (cmp < 0); break;
1856 case LE_EXPR: val = (cmp >= 0); break;
1857 case GE_EXPR: val = (cmp <= 0); break;
1858 default: done = false;
1860 if (done)
1862 if (val)
1863 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1864 else
1865 return boolean_false_node;
1869 /* Same business with inequality tests. */
1870 else if (code1 == NE_EXPR)
1872 bool val;
1873 switch (code2)
1875 case EQ_EXPR: val = (cmp != 0); break;
1876 case NE_EXPR: val = (cmp == 0); break;
1877 case LT_EXPR: val = (cmp >= 0); break;
1878 case GT_EXPR: val = (cmp <= 0); break;
1879 case LE_EXPR: val = (cmp > 0); break;
1880 case GE_EXPR: val = (cmp < 0); break;
1881 default:
1882 val = false;
1884 if (val)
1885 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1887 else if (code2 == NE_EXPR)
1889 bool val;
1890 switch (code1)
1892 case EQ_EXPR: val = (cmp == 0); break;
1893 case NE_EXPR: val = (cmp != 0); break;
1894 case LT_EXPR: val = (cmp <= 0); break;
1895 case GT_EXPR: val = (cmp >= 0); break;
1896 case LE_EXPR: val = (cmp < 0); break;
1897 case GE_EXPR: val = (cmp > 0); break;
1898 default:
1899 val = false;
1901 if (val)
1902 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1905 /* Chose the more restrictive of two < or <= comparisons. */
1906 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1907 && (code2 == LT_EXPR || code2 == LE_EXPR))
1909 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1910 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1911 else
1912 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1915 /* Likewise choose the more restrictive of two > or >= comparisons. */
1916 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1917 && (code2 == GT_EXPR || code2 == GE_EXPR))
1919 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1920 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1921 else
1922 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1925 /* Check for singleton ranges. */
1926 else if (cmp == 0
1927 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1928 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1929 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1931 /* Check for disjoint ranges. */
1932 else if (cmp <= 0
1933 && (code1 == LT_EXPR || code1 == LE_EXPR)
1934 && (code2 == GT_EXPR || code2 == GE_EXPR))
1935 return boolean_false_node;
1936 else if (cmp >= 0
1937 && (code1 == GT_EXPR || code1 == GE_EXPR)
1938 && (code2 == LT_EXPR || code2 == LE_EXPR))
1939 return boolean_false_node;
1942 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1943 NAME's definition is a truth value. See if there are any simplifications
1944 that can be done against the NAME's definition. */
1945 if (TREE_CODE (op1a) == SSA_NAME
1946 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1947 && (integer_zerop (op1b) || integer_onep (op1b)))
1949 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1950 || (code1 == NE_EXPR && integer_onep (op1b)));
1951 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1952 switch (gimple_code (stmt))
1954 case GIMPLE_ASSIGN:
1955 /* Try to simplify by copy-propagating the definition. */
1956 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1958 case GIMPLE_PHI:
1959 /* If every argument to the PHI produces the same result when
1960 ANDed with the second comparison, we win.
1961 Do not do this unless the type is bool since we need a bool
1962 result here anyway. */
1963 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1965 tree result = NULL_TREE;
1966 unsigned i;
1967 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1969 tree arg = gimple_phi_arg_def (stmt, i);
1971 /* If this PHI has itself as an argument, ignore it.
1972 If all the other args produce the same result,
1973 we're still OK. */
1974 if (arg == gimple_phi_result (stmt))
1975 continue;
1976 else if (TREE_CODE (arg) == INTEGER_CST)
1978 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1980 if (!result)
1981 result = boolean_false_node;
1982 else if (!integer_zerop (result))
1983 return NULL_TREE;
1985 else if (!result)
1986 result = fold_build2 (code2, boolean_type_node,
1987 op2a, op2b);
1988 else if (!same_bool_comparison_p (result,
1989 code2, op2a, op2b))
1990 return NULL_TREE;
1992 else if (TREE_CODE (arg) == SSA_NAME
1993 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1995 tree temp;
1996 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1997 /* In simple cases we can look through PHI nodes,
1998 but we have to be careful with loops.
1999 See PR49073. */
2000 if (! dom_info_available_p (CDI_DOMINATORS)
2001 || gimple_bb (def_stmt) == gimple_bb (stmt)
2002 || dominated_by_p (CDI_DOMINATORS,
2003 gimple_bb (def_stmt),
2004 gimple_bb (stmt)))
2005 return NULL_TREE;
2006 temp = and_var_with_comparison (arg, invert, code2,
2007 op2a, op2b);
2008 if (!temp)
2009 return NULL_TREE;
2010 else if (!result)
2011 result = temp;
2012 else if (!same_bool_result_p (result, temp))
2013 return NULL_TREE;
2015 else
2016 return NULL_TREE;
2018 return result;
2021 default:
2022 break;
2025 return NULL_TREE;
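/* For illustration of the GIMPLE_PHI case above: if op1a is a boolean
   PHI <0, b_1> tested with code1 == NE_EXPR (b_1 being a hypothetical
   SSA name), the constant argument 0 forces boolean_false_node, so the
   AND simplifies only if the other argument b_1 also yields
   boolean_false_node when ANDed with the second comparison.  */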
2028 /* Try to simplify the AND of two comparisons, specified by
2029 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2030 If this can be simplified to a single expression (without requiring
2031 introducing more SSA variables to hold intermediate values),
2032 return the resulting tree. Otherwise return NULL_TREE.
2033 If the result expression is non-null, it has boolean type. */
2035 tree
2036 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2037 enum tree_code code2, tree op2a, tree op2b)
2039 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2040 if (t)
2041 return t;
2042 else
2043 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
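/* An illustrative sketch (x is a hypothetical SSA name of integer
   type): ANDing two comparisons of the same operand against constants
   keeps the more restrictive one, so

     tree five = build_int_cst (integer_type_node, 5);
     tree three = build_int_cst (integer_type_node, 3);
     tree t = maybe_fold_and_comparisons (LT_EXPR, x, five,
                                          LT_EXPR, x, three);

   yields the tree for (x < 3), while disjoint ranges such as
   (x < 3) AND (x > 5) fold to boolean_false_node.  */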
2046 /* Helper function for or_comparisons_1: try to simplify the OR of the
2047 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2048 If INVERT is true, invert the value of VAR before doing the OR.
2049 Return NULL_TREE if we can't simplify this to a single expression. */
2051 static tree
2052 or_var_with_comparison (tree var, bool invert,
2053 enum tree_code code2, tree op2a, tree op2b)
2055 tree t;
2056 gimple stmt = SSA_NAME_DEF_STMT (var);
2058 /* We can only deal with variables whose definitions are assignments. */
2059 if (!is_gimple_assign (stmt))
2060 return NULL_TREE;
2062 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2063 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2064 Then we only have to consider the simpler non-inverted cases. */
2065 if (invert)
2066 t = and_var_with_comparison_1 (stmt,
2067 invert_tree_comparison (code2, false),
2068 op2a, op2b);
2069 else
2070 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2071 return canonicalize_bool (t, invert);
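/* For illustration: with INVERT set and code2 == LT_EXPR, the rewrite
   above computes var AND (op2a >= op2b) and lets canonicalize_bool
   negate the combined result, which is exactly
   !var OR (op2a < op2b) by DeMorgan's law.  */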
2074 /* Try to simplify the OR of the ssa variable defined by the assignment
2075 STMT with the comparison specified by (OP2A CODE2 OP2B).
2076 Return NULL_TREE if we can't simplify this to a single expression. */
2078 static tree
2079 or_var_with_comparison_1 (gimple stmt,
2080 enum tree_code code2, tree op2a, tree op2b)
2082 tree var = gimple_assign_lhs (stmt);
2083 tree true_test_var = NULL_TREE;
2084 tree false_test_var = NULL_TREE;
2085 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2087 /* Check for identities like (var OR (var != 0)) => var and (var OR (var == 0)) => true. */
2088 if (TREE_CODE (op2a) == SSA_NAME
2089 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2091 if ((code2 == NE_EXPR && integer_zerop (op2b))
2092 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2094 true_test_var = op2a;
2095 if (var == true_test_var)
2096 return var;
2098 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2099 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2101 false_test_var = op2a;
2102 if (var == false_test_var)
2103 return boolean_true_node;
2107 /* If the definition is a comparison, recurse on it. */
2108 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2110 tree t = or_comparisons_1 (innercode,
2111 gimple_assign_rhs1 (stmt),
2112 gimple_assign_rhs2 (stmt),
2113 code2,
2114 op2a,
2115 op2b);
2116 if (t)
2117 return t;
2120 /* If the definition is an AND or OR expression, we may be able to
2121 simplify by reassociating. */
2122 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2123 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2125 tree inner1 = gimple_assign_rhs1 (stmt);
2126 tree inner2 = gimple_assign_rhs2 (stmt);
2127 gimple s;
2128 tree t;
2129 tree partial = NULL_TREE;
2130 bool is_or = (innercode == BIT_IOR_EXPR);
2132 /* Check for boolean identities that don't require recursive examination
2133 of inner1/inner2:
2134 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2135 inner1 OR (inner1 AND inner2) => inner1
2136 !inner1 OR (inner1 OR inner2) => true
2137 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2139 if (inner1 == true_test_var)
2140 return (is_or ? var : inner1);
2141 else if (inner2 == true_test_var)
2142 return (is_or ? var : inner2);
2143 else if (inner1 == false_test_var)
2144 return (is_or
2145 ? boolean_true_node
2146 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2147 else if (inner2 == false_test_var)
2148 return (is_or
2149 ? boolean_true_node
2150 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2152 /* Next, redistribute/reassociate the OR across the inner tests.
2153 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2154 if (TREE_CODE (inner1) == SSA_NAME
2155 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2156 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2157 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2158 gimple_assign_rhs1 (s),
2159 gimple_assign_rhs2 (s),
2160 code2, op2a, op2b)))
2162 /* Handle the OR case, where we are reassociating:
2163 (inner1 OR inner2) OR (op2a code2 op2b)
2164 => (t OR inner2)
2165 If the partial result t is a constant, we win. Otherwise
2166 continue on to try reassociating with the other inner test. */
2167 if (is_or)
2169 if (integer_onep (t))
2170 return boolean_true_node;
2171 else if (integer_zerop (t))
2172 return inner2;
2175 /* Handle the AND case, where we are redistributing:
2176 (inner1 AND inner2) OR (op2a code2 op2b)
2177 => (t AND (inner2 OR (op2a code op2b))) */
2178 else if (integer_zerop (t))
2179 return boolean_false_node;
2181 /* Save partial result for later. */
2182 partial = t;
2185 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2186 if (TREE_CODE (inner2) == SSA_NAME
2187 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2188 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2189 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2190 gimple_assign_rhs1 (s),
2191 gimple_assign_rhs2 (s),
2192 code2, op2a, op2b)))
2194 /* Handle the OR case, where we are reassociating:
2195 (inner1 OR inner2) OR (op2a code2 op2b)
2196 => (inner1 OR t)
2197 => (t OR partial) */
2198 if (is_or)
2200 if (integer_zerop (t))
2201 return inner1;
2202 else if (integer_onep (t))
2203 return boolean_true_node;
2204 /* If both are the same, we can apply the identity
2205 (x OR x) == x. */
2206 else if (partial && same_bool_result_p (t, partial))
2207 return t;
2210 /* Handle the AND case, where we are redistributing:
2211 (inner1 AND inner2) OR (op2a code2 op2b)
2212 => (t AND (inner1 OR (op2a code2 op2b)))
2213 => (t AND partial) */
2214 else
2216 if (integer_zerop (t))
2217 return boolean_false_node;
2218 else if (partial)
2220 /* We already got a simplification for the other
2221 operand to the redistributed AND expression. The
2222 interesting case is when at least one is true.
2223 Or, if both are the same, we can apply the identity
2224 (x AND x) == x. */
2225 if (integer_onep (partial))
2226 return t;
2227 else if (integer_onep (t))
2228 return partial;
2229 else if (same_bool_result_p (t, partial))
2230 return t;
2235 return NULL_TREE;
2238 /* Try to simplify the OR of two comparisons defined by
2239 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2240 If this can be done without constructing an intermediate value,
2241 return the resulting tree; otherwise NULL_TREE is returned.
2242 This function is deliberately asymmetric as it recurses on SSA_DEFs
2243 in the first comparison but not the second. */
2245 static tree
2246 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2247 enum tree_code code2, tree op2a, tree op2b)
2249 tree truth_type = truth_type_for (TREE_TYPE (op1a));
2251 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2252 if (operand_equal_p (op1a, op2a, 0)
2253 && operand_equal_p (op1b, op2b, 0))
2255 /* Result will be either NULL_TREE, or a combined comparison. */
2256 tree t = combine_comparisons (UNKNOWN_LOCATION,
2257 TRUTH_ORIF_EXPR, code1, code2,
2258 truth_type, op1a, op1b);
2259 if (t)
2260 return t;
2263 /* Likewise the swapped case of the above. */
2264 if (operand_equal_p (op1a, op2b, 0)
2265 && operand_equal_p (op1b, op2a, 0))
2267 /* Result will be either NULL_TREE, or a combined comparison. */
2268 tree t = combine_comparisons (UNKNOWN_LOCATION,
2269 TRUTH_ORIF_EXPR, code1,
2270 swap_tree_comparison (code2),
2271 truth_type, op1a, op1b);
2272 if (t)
2273 return t;
2276 /* If both comparisons are of the same value against constants, we might
2277 be able to merge them. */
2278 if (operand_equal_p (op1a, op2a, 0)
2279 && TREE_CODE (op1b) == INTEGER_CST
2280 && TREE_CODE (op2b) == INTEGER_CST)
2282 int cmp = tree_int_cst_compare (op1b, op2b);
2284 /* If we have (op1a != op1b), we should either be able to
2285 return that or TRUE, depending on whether the constant op1b
2286 also satisfies the other comparison against op2b. */
2287 if (code1 == NE_EXPR)
2289 bool done = true;
2290 bool val;
2291 switch (code2)
2293 case EQ_EXPR: val = (cmp == 0); break;
2294 case NE_EXPR: val = (cmp != 0); break;
2295 case LT_EXPR: val = (cmp < 0); break;
2296 case GT_EXPR: val = (cmp > 0); break;
2297 case LE_EXPR: val = (cmp <= 0); break;
2298 case GE_EXPR: val = (cmp >= 0); break;
2299 default: done = false;
2301 if (done)
2303 if (val)
2304 return boolean_true_node;
2305 else
2306 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2309 /* Likewise if the second comparison is a != comparison. */
2310 else if (code2 == NE_EXPR)
2312 bool done = true;
2313 bool val;
2314 switch (code1)
2316 case EQ_EXPR: val = (cmp == 0); break;
2317 case NE_EXPR: val = (cmp != 0); break;
2318 case LT_EXPR: val = (cmp > 0); break;
2319 case GT_EXPR: val = (cmp < 0); break;
2320 case LE_EXPR: val = (cmp >= 0); break;
2321 case GE_EXPR: val = (cmp <= 0); break;
2322 default: done = false;
2324 if (done)
2326 if (val)
2327 return boolean_true_node;
2328 else
2329 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2333 /* See if an equality test is redundant with the other comparison. */
2334 else if (code1 == EQ_EXPR)
2336 bool val;
2337 switch (code2)
2339 case EQ_EXPR: val = (cmp == 0); break;
2340 case NE_EXPR: val = (cmp != 0); break;
2341 case LT_EXPR: val = (cmp < 0); break;
2342 case GT_EXPR: val = (cmp > 0); break;
2343 case LE_EXPR: val = (cmp <= 0); break;
2344 case GE_EXPR: val = (cmp >= 0); break;
2345 default:
2346 val = false;
2348 if (val)
2349 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2351 else if (code2 == EQ_EXPR)
2353 bool val;
2354 switch (code1)
2356 case EQ_EXPR: val = (cmp == 0); break;
2357 case NE_EXPR: val = (cmp != 0); break;
2358 case LT_EXPR: val = (cmp > 0); break;
2359 case GT_EXPR: val = (cmp < 0); break;
2360 case LE_EXPR: val = (cmp >= 0); break;
2361 case GE_EXPR: val = (cmp <= 0); break;
2362 default:
2363 val = false;
2365 if (val)
2366 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2369 /* Choose the less restrictive of two < or <= comparisons. */
2370 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2371 && (code2 == LT_EXPR || code2 == LE_EXPR))
2373 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2374 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2375 else
2376 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2379 /* Likewise choose the less restrictive of two > or >= comparisons. */
2380 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2381 && (code2 == GT_EXPR || code2 == GE_EXPR))
2383 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2384 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2385 else
2386 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2389 /* Check for singleton ranges. */
2390 else if (cmp == 0
2391 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2392 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2393 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2395 /* Check for less/greater pairs that don't restrict the range at all. */
2396 else if (cmp >= 0
2397 && (code1 == LT_EXPR || code1 == LE_EXPR)
2398 && (code2 == GT_EXPR || code2 == GE_EXPR))
2399 return boolean_true_node;
2400 else if (cmp <= 0
2401 && (code1 == GT_EXPR || code1 == GE_EXPR)
2402 && (code2 == LT_EXPR || code2 == LE_EXPR))
2403 return boolean_true_node;
2406 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2407 NAME's definition is a truth value. See if there are any simplifications
2408 that can be done against the NAME's definition. */
2409 if (TREE_CODE (op1a) == SSA_NAME
2410 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2411 && (integer_zerop (op1b) || integer_onep (op1b)))
2413 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2414 || (code1 == NE_EXPR && integer_onep (op1b)));
2415 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2416 switch (gimple_code (stmt))
2418 case GIMPLE_ASSIGN:
2419 /* Try to simplify by copy-propagating the definition. */
2420 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2422 case GIMPLE_PHI:
2423 /* If every argument to the PHI produces the same result when
2424 ORed with the second comparison, we win.
2425 Do not do this unless the type is bool since we need a bool
2426 result here anyway. */
2427 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2429 tree result = NULL_TREE;
2430 unsigned i;
2431 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2433 tree arg = gimple_phi_arg_def (stmt, i);
2435 /* If this PHI has itself as an argument, ignore it.
2436 If all the other args produce the same result,
2437 we're still OK. */
2438 if (arg == gimple_phi_result (stmt))
2439 continue;
2440 else if (TREE_CODE (arg) == INTEGER_CST)
2442 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2444 if (!result)
2445 result = boolean_true_node;
2446 else if (!integer_onep (result))
2447 return NULL_TREE;
2449 else if (!result)
2450 result = fold_build2 (code2, boolean_type_node,
2451 op2a, op2b);
2452 else if (!same_bool_comparison_p (result,
2453 code2, op2a, op2b))
2454 return NULL_TREE;
2456 else if (TREE_CODE (arg) == SSA_NAME
2457 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2459 tree temp;
2460 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2461 /* In simple cases we can look through PHI nodes,
2462 but we have to be careful with loops.
2463 See PR49073. */
2464 if (! dom_info_available_p (CDI_DOMINATORS)
2465 || gimple_bb (def_stmt) == gimple_bb (stmt)
2466 || dominated_by_p (CDI_DOMINATORS,
2467 gimple_bb (def_stmt),
2468 gimple_bb (stmt)))
2469 return NULL_TREE;
2470 temp = or_var_with_comparison (arg, invert, code2,
2471 op2a, op2b);
2472 if (!temp)
2473 return NULL_TREE;
2474 else if (!result)
2475 result = temp;
2476 else if (!same_bool_result_p (result, temp))
2477 return NULL_TREE;
2479 else
2480 return NULL_TREE;
2482 return result;
2485 default:
2486 break;
2489 return NULL_TREE;
2492 /* Try to simplify the OR of two comparisons, specified by
2493 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2494 If this can be simplified to a single expression (without requiring
2495 introducing more SSA variables to hold intermediate values),
2496 return the resulting tree. Otherwise return NULL_TREE.
2497 If the result expression is non-null, it has boolean type. */
2499 tree
2500 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2501 enum tree_code code2, tree op2a, tree op2b)
2503 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2504 if (t)
2505 return t;
2506 else
2507 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
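/* An illustrative sketch (x is a hypothetical SSA name of integer
   type): ORing complementary bounds against the same constant

     tree three = build_int_cst (integer_type_node, 3);
     tree t = maybe_fold_or_comparisons (LE_EXPR, x, three,
                                         GE_EXPR, x, three);

   yields boolean_true_node, since (x <= 3) OR (x >= 3) does not
   restrict the range at all, while (x < 5) OR (x < 3) keeps the less
   restrictive bound (x < 5).  */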
2511 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2513 Either NULL_TREE, a simplified but non-constant tree, or a constant
2514 is returned.
2516 ??? This should go into a gimple-fold-inline.h file to be eventually
2517 privatized with the single valueize function used in the various TUs
2518 to avoid the indirect function call overhead. */
2520 tree
2521 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2523 location_t loc = gimple_location (stmt);
2524 switch (gimple_code (stmt))
2526 case GIMPLE_ASSIGN:
2528 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2530 switch (get_gimple_rhs_class (subcode))
2532 case GIMPLE_SINGLE_RHS:
2534 tree rhs = gimple_assign_rhs1 (stmt);
2535 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2537 if (TREE_CODE (rhs) == SSA_NAME)
2539 /* If the RHS is an SSA_NAME, return its known constant value,
2540 if any. */
2541 return (*valueize) (rhs);
2543 /* Handle propagating invariant addresses into address
2544 operations. */
2545 else if (TREE_CODE (rhs) == ADDR_EXPR
2546 && !is_gimple_min_invariant (rhs))
2548 HOST_WIDE_INT offset = 0;
2549 tree base;
2550 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2551 &offset,
2552 valueize);
2553 if (base
2554 && (CONSTANT_CLASS_P (base)
2555 || decl_address_invariant_p (base)))
2556 return build_invariant_address (TREE_TYPE (rhs),
2557 base, offset);
2559 else if (TREE_CODE (rhs) == CONSTRUCTOR
2560 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2561 && (CONSTRUCTOR_NELTS (rhs)
2562 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2564 unsigned i;
2565 tree val, *vec;
2567 vec = XALLOCAVEC (tree,
2568 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2569 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2571 val = (*valueize) (val);
2572 if (TREE_CODE (val) == INTEGER_CST
2573 || TREE_CODE (val) == REAL_CST
2574 || TREE_CODE (val) == FIXED_CST)
2575 vec[i] = val;
2576 else
2577 return NULL_TREE;
2580 return build_vector (TREE_TYPE (rhs), vec);
2582 if (subcode == OBJ_TYPE_REF)
2584 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
2585 /* If callee is constant, we can fold away the wrapper. */
2586 if (is_gimple_min_invariant (val))
2587 return val;
2590 if (kind == tcc_reference)
2592 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2593 || TREE_CODE (rhs) == REALPART_EXPR
2594 || TREE_CODE (rhs) == IMAGPART_EXPR)
2595 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2597 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2598 return fold_unary_loc (EXPR_LOCATION (rhs),
2599 TREE_CODE (rhs),
2600 TREE_TYPE (rhs), val);
2602 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2603 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2605 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2606 return fold_ternary_loc (EXPR_LOCATION (rhs),
2607 TREE_CODE (rhs),
2608 TREE_TYPE (rhs), val,
2609 TREE_OPERAND (rhs, 1),
2610 TREE_OPERAND (rhs, 2));
2612 else if (TREE_CODE (rhs) == MEM_REF
2613 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2615 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2616 if (TREE_CODE (val) == ADDR_EXPR
2617 && is_gimple_min_invariant (val))
2619 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2620 unshare_expr (val),
2621 TREE_OPERAND (rhs, 1));
2622 if (tem)
2623 rhs = tem;
2626 return fold_const_aggregate_ref_1 (rhs, valueize);
2628 else if (kind == tcc_declaration)
2629 return get_symbol_constant_value (rhs);
2630 return rhs;
2633 case GIMPLE_UNARY_RHS:
2635 /* Handle unary operators that can appear in GIMPLE form.
2636 Note that we know the single operand must be a constant,
2637 so this should almost always return a simplified RHS. */
2638 tree lhs = gimple_assign_lhs (stmt);
2639 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2641 /* Conversions are useless for CCP purposes if they are
2642 value-preserving. Thus the restrictions that
2643 useless_type_conversion_p places for restrict qualification
2644 of pointer types should not apply here.
2645 Substitution later will only substitute to allowed places. */
2646 if (CONVERT_EXPR_CODE_P (subcode)
2647 && POINTER_TYPE_P (TREE_TYPE (lhs))
2648 && POINTER_TYPE_P (TREE_TYPE (op0))
2649 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2650 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2651 && TYPE_MODE (TREE_TYPE (lhs))
2652 == TYPE_MODE (TREE_TYPE (op0)))
2653 return op0;
2655 return
2656 fold_unary_ignore_overflow_loc (loc, subcode,
2657 gimple_expr_type (stmt), op0);
2660 case GIMPLE_BINARY_RHS:
2662 /* Handle binary operators that can appear in GIMPLE form. */
2663 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2664 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2666 /* Translate &x + CST into an invariant form suitable for
2667 further propagation. */
2668 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2669 && TREE_CODE (op0) == ADDR_EXPR
2670 && TREE_CODE (op1) == INTEGER_CST)
2672 tree off = fold_convert (ptr_type_node, op1);
2673 return build_fold_addr_expr_loc
2674 (loc,
2675 fold_build2 (MEM_REF,
2676 TREE_TYPE (TREE_TYPE (op0)),
2677 unshare_expr (op0), off));
2680 return fold_binary_loc (loc, subcode,
2681 gimple_expr_type (stmt), op0, op1);
2684 case GIMPLE_TERNARY_RHS:
2686 /* Handle ternary operators that can appear in GIMPLE form. */
2687 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2688 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2689 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2691 /* Fold embedded expressions in ternary codes. */
2692 if ((subcode == COND_EXPR
2693 || subcode == VEC_COND_EXPR)
2694 && COMPARISON_CLASS_P (op0))
2696 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2697 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2698 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2699 TREE_TYPE (op0), op00, op01);
2700 if (tem)
2701 op0 = tem;
2704 return fold_ternary_loc (loc, subcode,
2705 gimple_expr_type (stmt), op0, op1, op2);
2708 default:
2709 gcc_unreachable ();
2713 case GIMPLE_CALL:
2715 tree fn;
2717 if (gimple_call_internal_p (stmt))
2719 enum tree_code subcode = ERROR_MARK;
2720 switch (gimple_call_internal_fn (stmt))
2722 case IFN_UBSAN_CHECK_ADD:
2723 subcode = PLUS_EXPR;
2724 break;
2725 case IFN_UBSAN_CHECK_SUB:
2726 subcode = MINUS_EXPR;
2727 break;
2728 case IFN_UBSAN_CHECK_MUL:
2729 subcode = MULT_EXPR;
2730 break;
2731 default:
2732 return NULL_TREE;
2734 tree arg0 = gimple_call_arg (stmt, 0);
2735 tree arg1 = gimple_call_arg (stmt, 1);
2736 tree op0 = (*valueize) (arg0);
2737 tree op1 = (*valueize) (arg1);
2739 if (TREE_CODE (op0) != INTEGER_CST
2740 || TREE_CODE (op1) != INTEGER_CST)
2742 switch (subcode)
2744 case MULT_EXPR:
2745 /* x * 0 = 0 * x = 0 without overflow. */
2746 if (integer_zerop (op0) || integer_zerop (op1))
2747 return build_zero_cst (TREE_TYPE (arg0));
2748 break;
2749 case MINUS_EXPR:
2750 /* y - y = 0 without overflow. */
2751 if (operand_equal_p (op0, op1, 0))
2752 return build_zero_cst (TREE_TYPE (arg0));
2753 break;
2754 default:
2755 break;
2758 tree res
2759 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
2760 if (res
2761 && TREE_CODE (res) == INTEGER_CST
2762 && !TREE_OVERFLOW (res))
2763 return res;
2764 return NULL_TREE;
2767 fn = (*valueize) (gimple_call_fn (stmt));
2768 if (TREE_CODE (fn) == ADDR_EXPR
2769 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2770 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
2771 && gimple_builtin_call_types_compatible_p (stmt,
2772 TREE_OPERAND (fn, 0)))
2774 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2775 tree call, retval;
2776 unsigned i;
2777 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2778 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2779 call = build_call_array_loc (loc,
2780 gimple_call_return_type (stmt),
2781 fn, gimple_call_num_args (stmt), args);
2782 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2783 if (retval)
2785 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2786 STRIP_NOPS (retval);
2787 retval = fold_convert (gimple_call_return_type (stmt), retval);
2789 return retval;
2791 return NULL_TREE;
2794 default:
2795 return NULL_TREE;
2799 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2800 Returns NULL_TREE if folding to a constant is not possible, otherwise
2801 returns a constant according to is_gimple_min_invariant. */
2803 tree
2804 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2806 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2807 if (res && is_gimple_min_invariant (res))
2808 return res;
2809 return NULL_TREE;
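/* A minimal usage sketch; the callback name below is made up.  With an
   identity valueize callback the statement folds only if its operands
   are already constants:

     static tree
     identity_valueize (tree name)
     {
       return name;
     }

     tree cst = gimple_fold_stmt_to_constant (stmt, identity_valueize);

   Passes such as CCP instead supply a callback that maps SSA names to
   their known lattice values.  */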
2813 /* The following set of functions folds references using
2814 their constant initializers. */
2816 static tree fold_ctor_reference (tree type, tree ctor,
2817 unsigned HOST_WIDE_INT offset,
2818 unsigned HOST_WIDE_INT size, tree);
2820 /* See if we can find a constructor defining the value of BASE.
2821 When we know the constructor with a constant offset (such as when
2822 BASE is array[40] and we know the constructor of array), then
2823 BIT_OFFSET is adjusted accordingly.
2825 As a special case, return error_mark_node when constructor
2826 is not explicitly available, but it is known to be zero
2827 such as 'static const int a;'. */
2828 static tree
2829 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2830 tree (*valueize)(tree))
2832 HOST_WIDE_INT bit_offset2, size, max_size;
2833 if (TREE_CODE (base) == MEM_REF)
2835 if (!integer_zerop (TREE_OPERAND (base, 1)))
2837 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
2838 return NULL_TREE;
2839 *bit_offset += (mem_ref_offset (base).low
2840 * BITS_PER_UNIT);
2843 if (valueize
2844 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2845 base = valueize (TREE_OPERAND (base, 0));
2846 if (!base || TREE_CODE (base) != ADDR_EXPR)
2847 return NULL_TREE;
2848 base = TREE_OPERAND (base, 0);
2851 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2852 DECL_INITIAL. If BASE is a nested reference into another
2853 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2854 the inner reference. */
2855 switch (TREE_CODE (base))
2857 case VAR_DECL:
2858 case CONST_DECL:
2860 tree init = ctor_for_folding (base);
2862 /* Our semantics are the exact opposite of ctor_for_folding's;
2863 NULL means unknown, while error_mark_node means 0. */
2864 if (init == error_mark_node)
2865 return NULL_TREE;
2866 if (!init)
2867 return error_mark_node;
2868 return init;
2871 case ARRAY_REF:
2872 case COMPONENT_REF:
2873 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2874 if (max_size == -1 || size != max_size)
2875 return NULL_TREE;
2876 *bit_offset += bit_offset2;
2877 return get_base_constructor (base, bit_offset, valueize);
2879 case STRING_CST:
2880 case CONSTRUCTOR:
2881 return base;
2883 default:
2884 return NULL_TREE;
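/* For example: for a MEM_REF of &arr with constant byte offset 8,
   *BIT_OFFSET is first increased by 8 * BITS_PER_UNIT before the
   ADDR_EXPR operand is looked through and the constructor of 'arr'
   is searched for.  */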
2888 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
2889 to the memory at bit OFFSET.
2891 We only do the simple job of folding byte accesses. */
2893 static tree
2894 fold_string_cst_ctor_reference (tree type, tree ctor,
2895 unsigned HOST_WIDE_INT offset,
2896 unsigned HOST_WIDE_INT size)
2898 if (INTEGRAL_TYPE_P (type)
2899 && (TYPE_MODE (type)
2900 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2901 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2902 == MODE_INT)
2903 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2904 && size == BITS_PER_UNIT
2905 && !(offset % BITS_PER_UNIT))
2907 offset /= BITS_PER_UNIT;
2908 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2909 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2910 [offset]));
2911 /* Folding
2912 const char a[20]="hello";
2913 return a[10];
2915 might lead to an offset greater than the string length. In this case
2916 we know the value is either initialized to 0 or out of bounds. Return
2917 0 in both cases. */
2918 return build_zero_cst (type);
2920 return NULL_TREE;
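/* A worked example: for CTOR "hello", OFFSET == 8 and
   SIZE == BITS_PER_UNIT, the byte index is 8 / BITS_PER_UNIT == 1 and
   the folded value is 'e'; an OFFSET at or past the string length
   folds to 0 as described above.  */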
2923 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2924 SIZE to the memory at bit OFFSET. */
2926 static tree
2927 fold_array_ctor_reference (tree type, tree ctor,
2928 unsigned HOST_WIDE_INT offset,
2929 unsigned HOST_WIDE_INT size,
2930 tree from_decl)
2932 unsigned HOST_WIDE_INT cnt;
2933 tree cfield, cval;
2934 double_int low_bound, elt_size;
2935 double_int index, max_index;
2936 double_int access_index;
2937 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2938 HOST_WIDE_INT inner_offset;
2940 /* Compute low bound and elt size. */
2941 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2942 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2943 if (domain_type && TYPE_MIN_VALUE (domain_type))
2945 /* Static constructors for variably sized objects make no sense. */
2946 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2947 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2948 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2950 else
2951 low_bound = double_int_zero;
2952 /* Static constructors for variably sized objects make no sense. */
2953 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2954 == INTEGER_CST);
2955 elt_size =
2956 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2959 /* We can handle only constantly sized accesses that are known to not
2960 be larger than the size of an array element. */
2961 if (!TYPE_SIZE_UNIT (type)
2962 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2963 || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type)))
2964 || elt_size.is_zero ())
2965 return NULL_TREE;
2967 /* Compute the array index we look for. */
2968 access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
2969 .udiv (elt_size, TRUNC_DIV_EXPR);
2970 access_index += low_bound;
2971 if (index_type)
2972 access_index = access_index.ext (TYPE_PRECISION (index_type),
2973 TYPE_UNSIGNED (index_type));
2975 /* And offset within the access. */
2976 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2978 /* See if the array field is large enough to span the whole access. We do
2979 not care to fold accesses spanning multiple array indexes. */
2980 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
2981 return NULL_TREE;
2983 index = low_bound - double_int_one;
2984 if (index_type)
2985 index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));
2987 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2989 /* An array constructor might explicitly set an index, specify a range,
2990 or leave the index NULL, meaning that it is the next index after the
2991 previous one. */
2992 if (cfield)
2994 if (TREE_CODE (cfield) == INTEGER_CST)
2995 max_index = index = tree_to_double_int (cfield);
2996 else
2998 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2999 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
3000 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
3003 else
3005 index += double_int_one;
3006 if (index_type)
3007 index = index.ext (TYPE_PRECISION (index_type),
3008 TYPE_UNSIGNED (index_type));
3009 max_index = index;
3012 /* Do we have a match? */
3013 if (access_index.cmp (index, 1) >= 0
3014 && access_index.cmp (max_index, 1) <= 0)
3015 return fold_ctor_reference (type, cval, inner_offset, size,
3016 from_decl);
3018 /* When memory is not explicitly mentioned in the constructor,
3019 it is 0 (or out of range). */
3020 return build_zero_cst (type);
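/* A worked example: for a constructor of int a[10] with 4-byte
   elements, an access with OFFSET == 96 and SIZE == 32 gives
   access_index == (96 / BITS_PER_UNIT) / 4 == 3 and
   inner_offset == 96 % 32 == 0, so the reference folds to the
   constructor element for a[3].  */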
3023 /* CTOR is CONSTRUCTOR of an aggregate or vector.
3024 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
3026 static tree
3027 fold_nonarray_ctor_reference (tree type, tree ctor,
3028 unsigned HOST_WIDE_INT offset,
3029 unsigned HOST_WIDE_INT size,
3030 tree from_decl)
3032 unsigned HOST_WIDE_INT cnt;
3033 tree cfield, cval;
3035 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
3036 cval)
3038 tree byte_offset = DECL_FIELD_OFFSET (cfield);
3039 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
3040 tree field_size = DECL_SIZE (cfield);
3041 double_int bitoffset;
3042 double_int byte_offset_cst = tree_to_double_int (byte_offset);
3043 double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
3044 double_int bitoffset_end, access_end;
3046 /* Variable sized objects in static constructors make no sense,
3047 but field_size can be NULL for flexible array members. */
3048 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
3049 && TREE_CODE (byte_offset) == INTEGER_CST
3050 && (field_size != NULL_TREE
3051 ? TREE_CODE (field_size) == INTEGER_CST
3052 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
3054 /* Compute bit offset of the field. */
3055 bitoffset = tree_to_double_int (field_offset)
3056 + byte_offset_cst * bits_per_unit_cst;
3057 /* Compute bit offset where the field ends. */
3058 if (field_size != NULL_TREE)
3059 bitoffset_end = bitoffset + tree_to_double_int (field_size);
3060 else
3061 bitoffset_end = double_int_zero;
3063 access_end = double_int::from_uhwi (offset)
3064 + double_int::from_uhwi (size);
3066 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
3067 [BITOFFSET, BITOFFSET_END)? */
3068 if (access_end.cmp (bitoffset, 0) > 0
3069 && (field_size == NULL_TREE
3070 || double_int::from_uhwi (offset).slt (bitoffset_end)))
3072 double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
3073 /* We do have overlap. Now see if the field is large enough to
3074 cover the access. Give up for accesses spanning multiple
3075 fields. */
3076 if (access_end.cmp (bitoffset_end, 0) > 0)
3077 return NULL_TREE;
3078 if (double_int::from_uhwi (offset).slt (bitoffset))
3079 return NULL_TREE;
3080 return fold_ctor_reference (type, cval,
3081 inner_offset.to_uhwi (), size,
3082 from_decl);
3085 /* When memory is not explicitly mentioned in the constructor, it is 0. */
3086 return build_zero_cst (type);
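/* A worked example: a field laid out at bit 32 with DECL_SIZE 32
   covers [32, 64); an access with OFFSET == 32 and SIZE == 32 is fully
   contained in it, so we recurse into that field's value with
   inner_offset == 0.  */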
3089 /* CTOR is a value initializing memory; fold a reference of type TYPE and size SIZE
3090 to the memory at bit OFFSET. */
3092 static tree
3093 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3094 unsigned HOST_WIDE_INT size, tree from_decl)
3096 tree ret;
3098 /* We found the field with an exact match. */
3099 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3100 && !offset)
3101 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
3103 /* We are at the end of the walk; see if we can view-convert the
3104 result. */
3105 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3106 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3107 && operand_equal_p (TYPE_SIZE (type),
3108 TYPE_SIZE (TREE_TYPE (ctor)), 0))
3110 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
3111 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3112 if (ret)
3113 STRIP_NOPS (ret);
3114 return ret;
3116 if (TREE_CODE (ctor) == STRING_CST)
3117 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3118 if (TREE_CODE (ctor) == CONSTRUCTOR)
3121 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
3122 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
3123 return fold_array_ctor_reference (type, ctor, offset, size,
3124 from_decl);
3125 else
3126 return fold_nonarray_ctor_reference (type, ctor, offset, size,
3127 from_decl);
3130 return NULL_TREE;
3133 /* Return the tree representing the element referenced by T if T is an
3134 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
3135 names using VALUEIZE. Return NULL_TREE otherwise. */
3137 tree
3138 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3140 tree ctor, idx, base;
3141 HOST_WIDE_INT offset, size, max_size;
3142 tree tem;
3144 if (TREE_THIS_VOLATILE (t))
3145 return NULL_TREE;
3147 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3148 return get_symbol_constant_value (t);
3150 tem = fold_read_from_constant_string (t);
3151 if (tem)
3152 return tem;
3154 switch (TREE_CODE (t))
3156 case ARRAY_REF:
3157 case ARRAY_RANGE_REF:
3158 /* Constant indexes are handled well by get_base_constructor.
3159 We only special-case variable offsets here.
3160 FIXME: This code can't handle nested references with variable indexes
3161 (they will be handled only by iteration of ccp). Perhaps we can bring
3162 get_ref_base_and_extent here and make it use a valueize callback. */
3163 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3164 && valueize
3165 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3166 && TREE_CODE (idx) == INTEGER_CST)
3168 tree low_bound, unit_size;
3169 double_int doffset;
3171 /* If the resulting bit-offset is constant, track it. */
3172 if ((low_bound = array_ref_low_bound (t),
3173 TREE_CODE (low_bound) == INTEGER_CST)
3174 && (unit_size = array_ref_element_size (t),
3175 tree_fits_uhwi_p (unit_size))
3176 && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
3177 .sext (TYPE_PRECISION (TREE_TYPE (idx))),
3178 doffset.fits_shwi ()))
3180 offset = doffset.to_shwi ();
3181 offset *= tree_to_uhwi (unit_size);
3182 offset *= BITS_PER_UNIT;
3184 base = TREE_OPERAND (t, 0);
3185 ctor = get_base_constructor (base, &offset, valueize);
3186 /* Empty constructor. Always fold to 0. */
3187 if (ctor == error_mark_node)
3188 return build_zero_cst (TREE_TYPE (t));
3189 /* Out-of-bounds array access. The value is undefined,
3190 but don't fold. */
3191 if (offset < 0)
3192 return NULL_TREE;
3193 /* We cannot determine the ctor. */
3194 if (!ctor)
3195 return NULL_TREE;
3196 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3197 tree_to_uhwi (unit_size)
3198 * BITS_PER_UNIT,
3199 base);
3202 /* Fallthru. */
3204 case COMPONENT_REF:
3205 case BIT_FIELD_REF:
3206 case TARGET_MEM_REF:
3207 case MEM_REF:
3208 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3209 ctor = get_base_constructor (base, &offset, valueize);
3211 /* Empty constructor. Always fold to 0. */
3212 if (ctor == error_mark_node)
3213 return build_zero_cst (TREE_TYPE (t));
3214 /* We do not know the precise address. */
3215 if (max_size == -1 || max_size != size)
3216 return NULL_TREE;
3217 /* We cannot determine the ctor. */
3218 if (!ctor)
3219 return NULL_TREE;
3221 /* Out-of-bounds array access. The value is undefined, but don't fold. */
3222 if (offset < 0)
3223 return NULL_TREE;
3225 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
3226 base);
3228 case REALPART_EXPR:
3229 case IMAGPART_EXPR:
3231 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3232 if (c && TREE_CODE (c) == COMPLEX_CST)
3233 return fold_build1_loc (EXPR_LOCATION (t),
3234 TREE_CODE (t), TREE_TYPE (t), c);
3235 break;
3238 default:
3239 break;
3242 return NULL_TREE;
3245 tree
3246 fold_const_aggregate_ref (tree t)
3248 return fold_const_aggregate_ref_1 (t, NULL);
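/* A minimal usage sketch (the declaration is made up): given

     static const int a[2] = { 1, 2 };

   folding the reference a[1] with fold_const_aggregate_ref returns the
   INTEGER_CST 2, because get_base_constructor finds the initializer of
   'a' via ctor_for_folding.  */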
3251 /* Look up the virtual method with index TOKEN in the virtual table V
3252 at OFFSET.
3253 If CAN_REFER is non-NULL, set it to false when the method is not
3254 referable or when the virtual table is ill-formed (such as one rewritten
3255 by a non-C++-produced symbol); if CAN_REFER is NULL, just return NULL in that case. */
3257 tree
3258 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
3259 tree v,
3260 unsigned HOST_WIDE_INT offset,
3261 bool *can_refer)
3263 tree vtable = v, init, fn;
3264 unsigned HOST_WIDE_INT size;
3265 unsigned HOST_WIDE_INT elt_size, access_index;
3266 tree domain_type;
3268 if (can_refer)
3269 *can_refer = true;
3271 /* First of all, double-check that we have a virtual table. */
3272 if (TREE_CODE (v) != VAR_DECL
3273 || !DECL_VIRTUAL_P (v))
3275 gcc_assert (in_lto_p);
3276 /* Pass down that we lost track of the target. */
3277 if (can_refer)
3278 *can_refer = false;
3279 return NULL_TREE;
3282 init = ctor_for_folding (v);
3284 /* Virtual tables should always be born with constructors
3285 and we should always assume that they are available for
3286 folding. At the moment we do not stream them in all cases,
3287 but it should never happen that the ctor seems unreachable. */
3288 gcc_assert (init);
3289 if (init == error_mark_node)
3291 gcc_assert (in_lto_p);
3292 /* Pass down that we lost track of the target. */
3293 if (can_refer)
3294 *can_refer = false;
3295 return NULL_TREE;
3297 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3298 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
3299 offset *= BITS_PER_UNIT;
3300 offset += token * size;
3302 /* Look up the value in the constructor, which is assumed to be an array.
3303 This is equivalent to
3304 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
3305 offset, size, NULL);
3306 but in constant time. We expect that the frontend produced a simple
3307 array without indexed initializers. */
3309 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
3310 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
3311 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
3312 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
3314 access_index = offset / BITS_PER_UNIT / elt_size;
3315 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
3317 /* This code assumes that there are no
3318 indexed fields produced by the C++ FE, so we can directly index the array. */
3319 if (access_index < CONSTRUCTOR_NELTS (init))
3321 fn = CONSTRUCTOR_ELT (init, access_index)->value;
3322 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
3323 STRIP_NOPS (fn);
3325 else
3326 fn = NULL;
3328 /* For a type-inconsistent program we may end up looking up a virtual method
3329 in a virtual table that does not contain TOKEN entries. We may overrun
3330 the virtual table and pick up a constant or RTTI info pointer.
3331 In any case the call is undefined. */
3332 if (!fn
3333 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
3334 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
3335 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3336 else
3338 fn = TREE_OPERAND (fn, 0);
3340 /* When the cgraph node is missing and the function is not public, we cannot
3341 devirtualize. This can happen in WHOPR when the actual method
3342 ends up in another partition, because we found the devirtualization
3343 possibility too late. */
3344 if (!can_refer_decl_in_current_unit_p (fn, vtable))
3346 if (can_refer)
3348 *can_refer = false;
3349 return fn;
3351 return NULL_TREE;
3355 /* Make sure we create a cgraph node for functions we'll reference.
3356 They can be non-existent if the reference comes from an entry
3357 of an external vtable for example. */
3358 cgraph_get_create_node (fn);
3360 return fn;
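/* A worked example (assuming 64-bit function pointers, so
   elt_size == 8 and size == 64): for TOKEN == 2 and a byte OFFSET of
   16 into V, the bit offset becomes 16 * BITS_PER_UNIT + 2 * 64 == 256
   and access_index == 256 / BITS_PER_UNIT / 8 == 4, i.e. the fifth
   entry of the vtable array is picked.  */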
3363 /* Return a declaration of the function which an OBJ_TYPE_REF references. TOKEN
3364 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3365 KNOWN_BINFO carries the binfo describing the true type of
3366 OBJ_TYPE_REF_OBJECT(REF).
3367 If CAN_REFER is non-NULL, set it to false when the method is not
3368 referable or when the virtual table is ill-formed (such as one rewritten
3369 by a non-C++-produced symbol); if CAN_REFER is NULL, just return NULL in that case. */
3371 tree
3372 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
3373 bool *can_refer)
3375 unsigned HOST_WIDE_INT offset;
3376 tree v;
3378 v = BINFO_VTABLE (known_binfo);
3379 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
3380 if (!v)
3381 return NULL_TREE;
3383 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
3385 if (can_refer)
3386 *can_refer = false;
3387 return NULL_TREE;
3389 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
3392 /* Return true iff VAL is a gimple expression that is known to be
3393 non-negative. Restricted to floating-point inputs. */
3395 bool
3396 gimple_val_nonnegative_real_p (tree val)
3398 gimple def_stmt;
3400 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3402 /* Use existing logic for non-gimple trees. */
3403 if (tree_expr_nonnegative_p (val))
3404 return true;
3406 if (TREE_CODE (val) != SSA_NAME)
3407 return false;
3409 /* Currently we look only at the immediately defining statement
3410 to make this determination, since recursion on defining
3411 statements of operands can lead to quadratic behavior in the
3412 worst case. This is expected to catch almost all occurrences
3413 in practice. It would be possible to implement limited-depth
3414 recursion if important cases are lost. Alternatively, passes
3415 that need this information (such as the pow/powi lowering code
3416 in the cse_sincos pass) could be revised to provide it through
3417 dataflow propagation. */
3419 def_stmt = SSA_NAME_DEF_STMT (val);
3421 if (is_gimple_assign (def_stmt))
3423 tree op0, op1;
3425 /* See fold-const.c:tree_expr_nonnegative_p for additional
3426 cases that could be handled with recursion. */
3428 switch (gimple_assign_rhs_code (def_stmt))
3430 case ABS_EXPR:
3431 /* Always true for floating-point operands. */
3432 return true;
3434 case MULT_EXPR:
3435 /* True if the two operands are identical (since we are
3436 restricted to floating-point inputs). */
3437 op0 = gimple_assign_rhs1 (def_stmt);
3438 op1 = gimple_assign_rhs2 (def_stmt);
3440 if (op0 == op1
3441 || operand_equal_p (op0, op1, 0))
3442 return true;
3444 default:
3445 return false;
3448 else if (is_gimple_call (def_stmt))
3450 tree fndecl = gimple_call_fndecl (def_stmt);
3451 if (fndecl
3452 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3454 tree arg1;
3456 switch (DECL_FUNCTION_CODE (fndecl))
3458 CASE_FLT_FN (BUILT_IN_ACOS):
3459 CASE_FLT_FN (BUILT_IN_ACOSH):
3460 CASE_FLT_FN (BUILT_IN_CABS):
3461 CASE_FLT_FN (BUILT_IN_COSH):
3462 CASE_FLT_FN (BUILT_IN_ERFC):
3463 CASE_FLT_FN (BUILT_IN_EXP):
3464 CASE_FLT_FN (BUILT_IN_EXP10):
3465 CASE_FLT_FN (BUILT_IN_EXP2):
3466 CASE_FLT_FN (BUILT_IN_FABS):
3467 CASE_FLT_FN (BUILT_IN_FDIM):
3468 CASE_FLT_FN (BUILT_IN_HYPOT):
3469 CASE_FLT_FN (BUILT_IN_POW10):
3470 return true;
3472 CASE_FLT_FN (BUILT_IN_SQRT):
3473 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3474 negative inputs. */
3475 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3476 return true;
3478 break;
3480 CASE_FLT_FN (BUILT_IN_POWI):
3481 /* True if the second argument is an even integer. */
3482 arg1 = gimple_call_arg (def_stmt, 1);
3484 if (TREE_CODE (arg1) == INTEGER_CST
3485 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3486 return true;
3488 break;
3490 CASE_FLT_FN (BUILT_IN_POW):
3491 /* True if the second argument is an even integer-valued
3492 real. */
3493 arg1 = gimple_call_arg (def_stmt, 1);
3495 if (TREE_CODE (arg1) == REAL_CST)
3497 REAL_VALUE_TYPE c;
3498 HOST_WIDE_INT n;
3500 c = TREE_REAL_CST (arg1);
3501 n = real_to_integer (&c);
3503 if ((n & 1) == 0)
3505 REAL_VALUE_TYPE cint;
3506 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3507 if (real_identical (&c, &cint))
3508 return true;
3512 break;
3514 default:
3515 return false;
3520 return false;
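/* For illustration: for a definition y = x * x this returns true, and
   likewise for y = pow (x, 2.0) since 2.0 is an even integer-valued
   real; a plain y = x + 1.0 returns false because only the immediately
   defining statement is examined.  */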
3523 /* Given a pointer value T, return a simplified version of an
3524 indirection through T, or NULL_TREE if no simplification is
3525 possible. Note that the resulting type may be different from
3526 the type pointed to, in the sense that it is still compatible
3527 from the langhooks point of view. */
3529 tree
3530 gimple_fold_indirect_ref (tree t)
3532 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
3533 tree sub = t;
3534 tree subtype;
3536 STRIP_NOPS (sub);
3537 subtype = TREE_TYPE (sub);
3538 if (!POINTER_TYPE_P (subtype))
3539 return NULL_TREE;
3541 if (TREE_CODE (sub) == ADDR_EXPR)
3543 tree op = TREE_OPERAND (sub, 0);
3544 tree optype = TREE_TYPE (op);
3545 /* *&p => p */
3546 if (useless_type_conversion_p (type, optype))
3547 return op;
3549 /* *(foo *)&fooarray => fooarray[0] */
3550 if (TREE_CODE (optype) == ARRAY_TYPE
3551 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
3552 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3554 tree type_domain = TYPE_DOMAIN (optype);
3555 tree min_val = size_zero_node;
3556 if (type_domain && TYPE_MIN_VALUE (type_domain))
3557 min_val = TYPE_MIN_VALUE (type_domain);
3558 if (TREE_CODE (min_val) == INTEGER_CST)
3559 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3561 /* *(foo *)&complexfoo => __real__ complexfoo */
3562 else if (TREE_CODE (optype) == COMPLEX_TYPE
3563 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3564 return fold_build1 (REALPART_EXPR, type, op);
3565 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
3566 else if (TREE_CODE (optype) == VECTOR_TYPE
3567 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3569 tree part_width = TYPE_SIZE (type);
3570 tree index = bitsize_int (0);
3571 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
3575 /* *(p + CST) -> ... */
3576 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
3577 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
3579 tree addr = TREE_OPERAND (sub, 0);
3580 tree off = TREE_OPERAND (sub, 1);
3581 tree addrtype;
3583 STRIP_NOPS (addr);
3584 addrtype = TREE_TYPE (addr);
3586 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
3587 if (TREE_CODE (addr) == ADDR_EXPR
3588 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
3589 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
3590 && tree_fits_uhwi_p (off))
3592 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
3593 tree part_width = TYPE_SIZE (type);
3594 unsigned HOST_WIDE_INT part_widthi
3595 = tree_to_shwi (part_width) / BITS_PER_UNIT;
3596 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
3597 tree index = bitsize_int (indexi);
3598 if (offset / part_widthi
3599 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
3600 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
3601 part_width, index);
3604 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
3605 if (TREE_CODE (addr) == ADDR_EXPR
3606 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
3607 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
3609 tree size = TYPE_SIZE_UNIT (type);
3610 if (tree_int_cst_equal (size, off))
3611 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
3614 /* *(p + CST) -> MEM_REF <p, CST>. */
3615 if (TREE_CODE (addr) != ADDR_EXPR
3616 || DECL_P (TREE_OPERAND (addr, 0)))
3617 return fold_build2 (MEM_REF, type,
3618 addr,
3619 build_int_cst_wide (ptype,
3620 TREE_INT_CST_LOW (off),
3621 TREE_INT_CST_HIGH (off)));
3624 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3625 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3626 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
3627 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3629 tree type_domain;
3630 tree min_val = size_zero_node;
3631 tree osub = sub;
3632 sub = gimple_fold_indirect_ref (sub);
3633 if (! sub)
3634 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3635 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3636 if (type_domain && TYPE_MIN_VALUE (type_domain))
3637 min_val = TYPE_MIN_VALUE (type_domain);
3638 if (TREE_CODE (min_val) == INTEGER_CST)
3639 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3642 return NULL_TREE;
3645 /* Return true if CODE is an operation that when operating on signed
3646 integer types involves undefined behavior on overflow and the
3647 operation can be expressed with unsigned arithmetic. */
3649 bool
3650 arith_code_with_undefined_signed_overflow (tree_code code)
3652 switch (code)
3654 case PLUS_EXPR:
3655 case MINUS_EXPR:
3656 case MULT_EXPR:
3657 case NEGATE_EXPR:
3658 case POINTER_PLUS_EXPR:
3659 return true;
3660 default:
3661 return false;
3665 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
3666 operation that can be transformed to unsigned arithmetic by converting
3667 its operands, carrying out the operation in the corresponding unsigned
3668 type and converting the result back to the original type.
3670 Returns a sequence of statements that replace STMT and also contain
3671 a modified form of STMT itself. */
3673 gimple_seq
3674 rewrite_to_defined_overflow (gimple stmt)
3676 if (dump_file && (dump_flags & TDF_DETAILS))
3678 fprintf (dump_file, "rewriting stmt with undefined signed "
3679 "overflow ");
3680 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3683 tree lhs = gimple_assign_lhs (stmt);
3684 tree type = unsigned_type_for (TREE_TYPE (lhs));
3685 gimple_seq stmts = NULL;
3686 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
3688 gimple_seq stmts2 = NULL;
3689 gimple_set_op (stmt, i,
3690 force_gimple_operand (fold_convert (type,
3691 gimple_op (stmt, i)),
3692 &stmts2, true, NULL_TREE));
3693 gimple_seq_add_seq (&stmts, stmts2);
3695 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
3696 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
3697 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
3698 gimple_seq_add_stmt (&stmts, stmt);
3699 gimple cvt = gimple_build_assign_with_ops
3700 (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
3701 gimple_seq_add_stmt (&stmts, cvt);
3703 return stmts;
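/* An illustrative sketch of the rewrite (the SSA names are made up):
   a signed addition

     x_1 = a_2 + b_3;

   becomes the sequence

     _4 = (unsigned int) a_2;
     _5 = (unsigned int) b_3;
     _6 = _4 + _5;
     x_1 = (int) _6;

   where the unsigned arithmetic wraps and thus has no undefined
   overflow.  */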