/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED	->  This is the default starting value.  V_i
			    has not been processed yet.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.
   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.
   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like the following, where
	a_9 is assigned on the PRED arm of a conditional and a_10 is
	assigned 100 on the other arm:

			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.
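
	For instance, in

			v_3 = PHI (v_1, v_2)

	if v_1 is known to be CONSTANT 5 but v_2 is still UNDEFINED
	(its defining statement has not been simulated yet, or 'v' is
	used uninitialized on that path), the meet may optimistically
	treat v_3 as CONSTANT 5.  (The names and values in this example
	are illustrative only.)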
   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   Constant propagation in stores and loads (STORE-CCP)
   -----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:
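
	(A sketch of the kind of fragment involved; the declarations
	 and constants below are illustrative assumptions, chosen so
	 that the discussion that follows makes sense:)

	  struct { int a; int b; } a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      never_executed ();
	  }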
   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the V_MAY_DEF and V_MUST_DEF operands for each store.
   Additionally, since we also glob partial loads/stores with the base
   symbol, we also keep track of the memory reference where the
   constant value was stored (in the MEM_REF field of PROP_VALUE_T).
   For instance,
		# a_5 = V_MAY_DEF <a_4>
		a.a = 2;

		# VUSE <a_5>
		... = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.
   To support STORE-CCP, it is necessary to add a new value to the
   constant propagation lattice.  When evaluating a load for a memory
   reference we can no longer assume a value of UNDEFINED if we
   haven't seen a preceding store to the same memory location.
   Consider, for instance, a global variable 'A' that is only
   conditionally assigned the value 3 inside a function 'foo' which
   then returns A.  At the merge point we will have something like:

		# A_5 = PHI (A_4, A_2);
   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.  Therefore,
   when doing STORE-CCP, we introduce a fifth lattice value
   (UNKNOWN_VAL), which overrides any other value when computing the
   meet operation in PHI nodes.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.
   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "ggc.h"
#include "basic-block.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "target.h"
/* Possible lattice values.  */
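/* (The enumeration below is a sketch of the lattice definition assumed by
   the rest of this file.  The relative order of the values matters because
   set_lattice_value asserts that lattice transitions only move upward,
   with UNKNOWN_VAL placed below CONSTANT.)  */
typedef enum
{
  UNINITIALIZED = 0,
  UNDEFINED,
  UNKNOWN_VAL,
  CONSTANT,
  VARYING
} ccp_lattice_t;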
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a V_MAY_DEF or V_MUST_DEF), CONST_VAL[I].MEM_REF will
   contain the actual memory reference used to store (i.e., the LHS of
   the assignment doing the store).  */
static prop_value_t *const_val;

/* True if we are also propagating constants in stores and loads.  */
static bool do_store_ccp;
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case UNKNOWN_VAL:
      fprintf (outf, "%sUNKNOWN_VAL", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}
/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* The regular is_gimple_min_invariant does a shallow test of the object.
   It assumes that full gimplification has happened, or will happen on the
   object.  For a value coming from DECL_INITIAL, this is not true, so we
   have to be more strict ourselves.  */

static bool
ccp_decl_initial_min_invariant (tree t)
{
  if (!is_gimple_min_invariant (t))
    return false;
  if (TREE_CODE (t) == ADDR_EXPR)
    {
      /* Inline and unroll is_gimple_addressable.  */
      while (1)
	{
	  t = TREE_OPERAND (t, 0);
	  if (is_gimple_id (t))
	    return true;
	  if (!handled_component_p (t))
	    return false;
	}
    }
  return true;
}
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- If SSA_NAME_VALUE is set and it is a constant, its value is
      used.

   4- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   5- Variables that are not GIMPLE registers are considered
      UNKNOWN_VAL, which is really a stronger version of UNDEFINED.
      It's used to avoid the short circuit evaluation implied by
      UNDEFINED in ccp_lattice_meet.  */
static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };

  if (!do_store_ccp && !is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
	 non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if (SSA_NAME_VALUE (var)
	   && is_gimple_min_invariant (SSA_NAME_VALUE (var)))
    {
      val.lattice_val = CONSTANT;
      val.value = SSA_NAME_VALUE (var);
    }
  else if (TREE_STATIC (sym)
	   && TREE_READONLY (sym)
	   && DECL_INITIAL (sym)
	   && ccp_decl_initial_min_invariant (DECL_INITIAL (sym)))
    {
      /* Globals and static variables declared 'const' take their
	 initial value.  */
      val.lattice_val = CONSTANT;
      val.value = DECL_INITIAL (sym);
    }
  else
    {
      tree stmt = SSA_NAME_DEF_STMT (var);

      if (IS_EMPTY_STMT (stmt))
	{
	  /* Variables defined by an empty statement are those used
	     before being initialized.  If VAR is a local variable, we
	     can assume initially that it is UNDEFINED.  If we are
	     doing STORE-CCP, function arguments and non-register
	     variables are initially UNKNOWN_VAL, because we cannot
	     discard the value incoming from outside of this function
	     (see ccp_lattice_meet for details).  */
	  if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	    val.lattice_val = UNDEFINED;
	  else if (do_store_ccp)
	    val.lattice_val = UNKNOWN_VAL;
	  else
	    val.lattice_val = VARYING;
	}
      else if (TREE_CODE (stmt) == MODIFY_EXPR
	       || TREE_CODE (stmt) == PHI_NODE)
	{
	  /* Any other variable defined by an assignment or a PHI node
	     is considered UNDEFINED (or UNKNOWN_VAL if VAR is not a
	     GIMPLE register).  */
	  val.lattice_val = is_gimple_reg (sym) ? UNDEFINED : UNKNOWN_VAL;
	}
      else
	{
	  /* Otherwise, VAR will never take on a constant value.  */
	  val.lattice_val = VARYING;
	}
    }

  return val;
}
/* Get the constant value associated with variable VAR.  If
   MAY_USE_DEFAULT_P is true, call get_default_value on variables that
   have the lattice value UNINITIALIZED.  */

static prop_value_t *
get_value (tree var, bool may_use_default_p)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
  if (may_use_default_p && val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var, false);

  /* Lattice transitions must always be monotonically increasing in
     value.  We allow two exceptions:

     1- If *OLD_VAL and NEW_VAL are the same, return false to
	inform the caller that this was a non-transition.

     2- If we are doing store-ccp (i.e., DOING_STORE_CCP is true),
	allow CONSTANT->UNKNOWN_VAL.  The UNKNOWN_VAL state is a
	special type of UNDEFINED state which prevents the short
	circuit evaluation of PHI arguments (see ccp_visit_phi_node
	and ccp_lattice_meet).  */
  gcc_assert (old_val->lattice_val <= new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && old_val->value == new_val.value
		  && old_val->mem_ref == new_val.mem_ref)
	      || (do_store_ccp
		  && old_val->lattice_val == CONSTANT
		  && new_val.lattice_val == UNKNOWN_VAL));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  %sdding SSA edges to worklist.\n",
		   new_val.lattice_val != UNDEFINED ? "A" : "Not a");
	}

      *old_val = new_val;

      /* Transitions UNINITIALIZED -> UNDEFINED are never interesting
	 for propagation purposes.  In these cases return false to
	 avoid doing useless work.  */
      return (new_val.lattice_val != UNDEFINED);
    }

  return false;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if any operands of STMT are undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
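/* (For instance, if STMT is "z_5 = x_3 + 10" and x_3 is known to be
   CONSTANT while no operand is UNDEFINED, the likely value is CONSTANT,
   so evaluate_stmt will attempt to fold the statement.  The names and
   values in this example are illustrative only.)  */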
static ccp_lattice_t
likely_value (tree stmt)
{
  bool found_constant;
  stmt_ann_t ann;
  tree use;
  ssa_op_iter iter;

  ann = stmt_ann (stmt);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (ann->has_volatile_ops)
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!do_store_ccp
      && (ann->makes_aliased_stores
	  || ann->makes_aliased_loads
	  || !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS)))
    return VARYING;

  /* A CALL_EXPR is assumed to be varying.  NOTE: This may be overly
     conservative, in the presence of const and pure calls.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return VARYING;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (TREE_CODE (stmt) != MODIFY_EXPR
      && TREE_CODE (stmt) != COND_EXPR
      && TREE_CODE (stmt) != SWITCH_EXPR)
    return VARYING;

  if (is_gimple_min_invariant (get_rhs (stmt)))
    return CONSTANT;

  found_constant = false;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use, true);

      if (val->lattice_val == VARYING)
	return VARYING;

      if (val->lattice_val == UNKNOWN_VAL)
	{
	  /* UNKNOWN_VAL is invalid when not doing STORE-CCP.  */
	  gcc_assert (do_store_ccp);
	  return UNKNOWN_VAL;
	}

      if (val->lattice_val == CONSTANT)
	found_constant = true;
    }

  if (found_constant
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE)
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_VUSE))
    return CONSTANT;

  return UNDEFINED;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = xmalloc (num_ssa_names * sizeof (*const_val));
  memset (const_val, 0, num_ssa_names * sizeof (*const_val));

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  bool is_varying = false;
	  tree stmt = bsi_stmt (i);

	  if (likely_value (stmt) == VARYING)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		get_value (def, false)->lattice_val = VARYING;

	      /* Never mark conditional jumps with DONT_SIMULATE_AGAIN,
		 otherwise the propagator will never add the outgoing
		 control edges to the worklist.  */
	      if (TREE_CODE (stmt) != COND_EXPR
		  && TREE_CODE (stmt) != SWITCH_EXPR)
		is_varying = true;
	    }

	  DONT_SIMULATE_AGAIN (stmt) = is_varying;
	}
    }

  /* Now process PHI nodes.  */
  FOR_EACH_BB (bb)
    {
      tree phi;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  int i;
	  tree arg;
	  prop_value_t *val = get_value (PHI_RESULT (phi), false);

	  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
	    {
	      arg = PHI_ARG_DEF (phi, i);

	      if (TREE_CODE (arg) == SSA_NAME
		  && get_value (arg, false)->lattice_val == VARYING)
		{
		  val->lattice_val = VARYING;
		  break;
		}
	    }

	  DONT_SIMULATE_AGAIN (phi) = (val->lattice_val == VARYING);
	}
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.  */

static void
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  substitute_and_fold (const_val, false);

  free (const_val);
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M UNKNOWN_VAL = UNKNOWN_VAL
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)

   Lattice values UNKNOWN_VAL and UNDEFINED are similar but have
   different semantics at PHI nodes.  Both values imply that we don't
   know whether the variable is constant or not.  However, UNKNOWN_VAL
   values override all others.  For instance, suppose that A is a
   global variable, assigned the constant 4 on the path that defines
   A_1 and merged with A_2 coming in along another path:

		| A_3 = PHI (A_2, A_1)

   If the edge into A_2 is not executable, the first visit to A_3 will
   yield the constant 4.  But the second visit to A_3 will be with A_2
   in state UNKNOWN_VAL.  We can no longer conclude that A_3 is 4
   because A_2 may have been set in another function.  If we had used
   the lattice value UNDEFINED, we would have wrongly concluded that
   A_3 is 4.  */
static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any.  */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == UNKNOWN_VAL
	   || val2->lattice_val == UNKNOWN_VAL)
    {
      /* UNKNOWN_VAL values are invalid if we are not doing STORE-CCP.  */
      gcc_assert (do_store_ccp);

      /* any M UNKNOWN_VAL = UNKNOWN_VAL.  */
      val1->lattice_val = UNKNOWN_VAL;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1
	   && (!do_store_ccp
	       || (val1->mem_ref && val2->mem_ref
		   && operand_equal_p (val1->mem_ref, val2->mem_ref, 0))))
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      val1->value = val1->value;
      val1->mem_ref = val1->mem_ref;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
}
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */
static enum ssa_prop_result
ccp_visit_phi_node (tree phi)
{
  int i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_generic_expr (dump_file, phi, dump_flags);
    }

  old_val = get_value (PHI_RESULT (phi), false);
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      break;

    case UNKNOWN_VAL:
      /* To avoid the default value of UNKNOWN_VAL overriding
	 that of its possible constant arguments, temporarily
	 set the PHI node's default lattice value to be
	 UNDEFINED.  If the PHI node's old value was UNKNOWN_VAL and
	 the new value is UNDEFINED, then we prevent the invalid
	 transition by not calling set_lattice_value.  */
      gcc_assert (do_store_ccp);

      /* FALLTHRU  */

    case UNDEFINED:
    case UNINITIALIZED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      new_val.mem_ref = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = PHI_ARG_EDGE (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, Compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = PHI_ARG_DEF (phi, i);
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	      arg_val.mem_ref = NULL_TREE;
	    }
	  else
	    arg_val = *(get_value (arg, true));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Check for an invalid change from UNKNOWN_VAL to UNDEFINED.  */
  if (do_store_ccp
      && old_val->lattice_val == UNKNOWN_VAL
      && new_val.lattice_val == UNDEFINED)
    return SSA_PROP_NOT_INTERESTING;

  /* Otherwise, make the transition to the new value.  */
  if (set_lattice_value (PHI_RESULT (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS.  */
static tree
ccp_fold (tree stmt)
{
  tree rhs = get_rhs (stmt);
  enum tree_code code = TREE_CODE (rhs);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree retval = NULL_TREE;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* If the RHS is an SSA_NAME, return its known constant value,
	 if any.  */
      return get_value (rhs, true)->value;
    }
  else if (do_store_ccp && stmt_makes_single_load (stmt))
    {
      /* If the RHS is a memory load, see if the VUSEs associated with
	 it are a valid constant for that memory load.  */
      prop_value_t *val = get_value_loaded_by (stmt, const_val);
      if (val && val->mem_ref
	  && operand_equal_p (val->mem_ref, rhs, 0))
	return val->value;
      else
	return NULL_TREE;
    }

  /* Unary operators.  Note that we know the single operand must
     be a constant.  So this should almost always return a
     simplified RHS.  */
  if (kind == tcc_unary)
    {
      /* Handle unary operators which can appear in GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);

      /* Simplify the operand down to a constant.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  prop_value_t *val = get_value (op0, true);
	  if (val->lattice_val == CONSTANT)
	    op0 = get_value (op0, true)->value;
	}

      return fold_unary (code, TREE_TYPE (rhs), op0);
    }

  /* Binary and comparison operators.  We know one or both of the
     operands are constants.  */
  else if (kind == tcc_binary
	   || kind == tcc_comparison
	   || code == TRUTH_AND_EXPR
	   || code == TRUTH_OR_EXPR
	   || code == TRUTH_XOR_EXPR)
    {
      /* Handle binary and comparison operators that can appear in
	 GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);
      tree op1 = TREE_OPERAND (rhs, 1);

      /* Simplify the operands down to constants when appropriate.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  prop_value_t *val = get_value (op0, true);
	  if (val->lattice_val == CONSTANT)
	    op0 = val->value;
	}

      if (TREE_CODE (op1) == SSA_NAME)
	{
	  prop_value_t *val = get_value (op1, true);
	  if (val->lattice_val == CONSTANT)
	    op1 = val->value;
	}

      return fold_binary (code, TREE_TYPE (rhs), op0, op1);
    }

  /* We may be able to fold away calls to builtin functions if their
     arguments are constants.  */
  else if (code == CALL_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
	   && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
	       == FUNCTION_DECL)
	   && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
    {
      if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
	{
	  tree *orig, var;
	  tree fndecl, arglist;
	  use_operand_p var_p;
	  ssa_op_iter iter;
	  int i = 0;

	  /* Preserve the original values of every operand.  */
	  orig = xmalloc (sizeof (tree) * NUM_SSA_OPERANDS (stmt, SSA_OP_USE));
	  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
	    orig[i++] = var;

	  /* Substitute operands with their values and try to fold.  */
	  replace_uses_in (stmt, NULL, const_val);
	  fndecl = get_callee_fndecl (rhs);
	  arglist = TREE_OPERAND (rhs, 1);
	  retval = fold_builtin (fndecl, arglist, false);

	  /* Restore operands to their original form.  */
	  i = 0;
	  FOR_EACH_SSA_USE_OPERAND (var_p, stmt, iter, SSA_OP_USE)
	    SET_USE (var_p, orig[i++]);
	  free (orig);
	}
    }

  /* If we got a simplified form, see if we need to convert its type.  */
  if (retval)
    return fold_convert (TREE_TYPE (rhs), retval);

  /* No simplification was possible.  */
  return rhs;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */
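/* (For instance, given a declaration like

	static const int tbl[3] = { 10, 20, 30 };

   a read of tbl[2] can be folded to the constant 30, provided the
   CONSTRUCTOR for the initializer is available and the index either is,
   or has been propagated to, an INTEGER_CST.  The declaration is
   illustrative only.)  */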
static tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  tree cfield, cval;
  unsigned HOST_WIDE_INT cnt;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx, true))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  return cval;
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  return cval;
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Evaluate statement STMT.  */

static prop_value_t
evaluate_stmt (tree stmt)
{
  prop_value_t val;
  tree simplified;
  ccp_lattice_t likelyvalue = likely_value (stmt);

  val.mem_ref = NULL_TREE;

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    simplified = get_rhs (stmt);
  /* If the statement is an ARRAY_REF or COMPONENT_REF into constant
     aggregates, extract the referenced constant.  Otherwise the
     statement is likely to have an UNDEFINED value, and there will be
     nothing to do.  Note that fold_const_aggregate_ref returns
     NULL_TREE if the first case does not match.  */
  else
    simplified = fold_const_aggregate_ref (get_rhs (stmt));

  if (simplified && is_gimple_min_invariant (simplified))
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED || likelyvalue == UNKNOWN_VAL)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).  */
static enum ssa_prop_result
visit_assignment (tree stmt, tree *output_p)
{
  prop_value_t val;
  tree lhs, rhs;
  enum ssa_prop_result retval;

  lhs = TREE_OPERAND (stmt, 0);
  rhs = TREE_OPERAND (stmt, 1);

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* For a simple copy operation, we copy the lattice values.  */
      prop_value_t *nval = get_value (rhs, true);
      val = *nval;
    }
  else if (do_store_ccp && stmt_makes_single_load (stmt))
    {
      /* Same as above, but the RHS is not a gimple register and yet
	 has a known VUSE.  If STMT is loading from the same memory
	 location that created the SSA_NAMEs for the virtual operands,
	 we can propagate the value on the RHS.  */
      prop_value_t *nval = get_value_loaded_by (stmt, const_val);

      if (nval && nval->mem_ref
	  && operand_equal_p (nval->mem_ref, rhs, 0))
	val = *nval;
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement.  */
    val = evaluate_stmt (stmt);

  /* If the original LHS was a VIEW_CONVERT_EXPR, modify the constant
     value to be a VIEW_CONVERT_EXPR of the old constant value.

     ??? Also, if this was a definition of a bitfield, we need to widen
     the constant value into the type of the destination variable.  This
     should not be necessary if GCC represented bitfields properly.  */
  {
    tree orig_lhs = TREE_OPERAND (stmt, 0);

    if (TREE_CODE (orig_lhs) == VIEW_CONVERT_EXPR
	&& val.lattice_val == CONSTANT)
      {
	tree w = fold_build1 (VIEW_CONVERT_EXPR,
			      TREE_TYPE (TREE_OPERAND (orig_lhs, 0)),
			      val.value);

	orig_lhs = TREE_OPERAND (orig_lhs, 0);
	if (w && is_gimple_min_invariant (w))
	  val.value = w;
	else
	  {
	    val.lattice_val = VARYING;
	    val.value = NULL_TREE;
	  }
      }

    if (val.lattice_val == CONSTANT
	&& TREE_CODE (orig_lhs) == COMPONENT_REF
	&& DECL_BIT_FIELD (TREE_OPERAND (orig_lhs, 1)))
      {
	tree w = widen_bitfield (val.value, TREE_OPERAND (orig_lhs, 1),
				 orig_lhs);

	if (w && is_gimple_min_invariant (w))
	  val.value = w;
	else
	  {
	    val.lattice_val = VARYING;
	    val.value = NULL_TREE;
	    val.mem_ref = NULL_TREE;
	  }
      }
  }

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }
  else if (do_store_ccp && stmt_makes_single_store (stmt))
    {
      /* Otherwise, set the names in V_MAY_DEF/V_MUST_DEF operands
	 to the new constant value and mark the LHS as the memory
	 reference associated with VAL.  */
      ssa_op_iter i;
      tree vdef;
      bool changed;

      /* Stores cannot take on an UNDEFINED value.  */
      if (val.lattice_val == UNDEFINED)
	val.lattice_val = UNKNOWN_VAL;

      /* Mark VAL as stored in the LHS of this assignment.  */
      val.mem_ref = lhs;

      /* Set the value of every VDEF to VAL.  */
      changed = false;
      FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
	changed |= set_lattice_value (vdef, val);

      /* Note that for propagation purposes, we are only interested in
	 visiting statements that load the exact same memory reference
	 stored here.  Those statements will have the exact same list
	 of virtual uses, so it is enough to set the output of this
	 statement to be its first virtual definition.  */
      *output_p = first_vdef (stmt);
      if (changed)
	{
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (tree stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = bb_for_stmt (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */
static enum ssa_prop_result
ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_generic_stmt (dump_file, stmt, dump_flags);
      fprintf (dump_file, "\n");
    }

  if (TREE_CODE (stmt) == MODIFY_EXPR)
    {
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);
    }
  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
    {
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      return visit_cond_stmt (stmt, taken_edge_p);
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  */

static void
execute_ssa_ccp (bool store_ccp)
{
  do_store_ccp = store_ccp;
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  ccp_finalize ();
}


static void
do_ssa_ccp (void)
{
  execute_ssa_ccp (false);
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}
struct tree_opt_pass pass_ccp =
{
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_cleanup_cfg | TODO_dump_func | TODO_update_ssa
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts,		/* todo_flags_finish */
  0					/* letter */
};
static void
do_ssa_store_ccp (void)
{
  /* If STORE-CCP is not enabled, we just run regular CCP.  */
  execute_ssa_ccp (flag_tree_store_ccp != 0);
}

static bool
gate_store_ccp (void)
{
  /* STORE-CCP is enabled only with -ftree-store-ccp, but when
     -fno-tree-store-ccp is specified, we should run regular CCP.
     That's why the pass is enabled with either flag.  */
  return flag_tree_store_ccp != 0 || flag_tree_ccp != 0;
}
struct tree_opt_pass pass_store_ccp =
{
  "store_ccp",				/* name */
  gate_store_ccp,			/* gate */
  do_ssa_store_ccp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_STORE_CCP,			/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_update_ssa
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts,		/* todo_flags_finish */
  0					/* letter */
};
/* Given a constant value VAL for bitfield FIELD, and a destination
   variable VAR, return VAL appropriately widened to fit into VAR.  If
   FIELD is wider than HOST_WIDE_INT, NULL is returned.  */
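/* (For instance, with an illustrative 4-bit signed bitfield stored in a
   32-bit VAR, the constant 12 (binary 1100) has its sign bit set, so the
   sign-extension path below ORs in the upper 28 bits and yields -4; had
   the field been unsigned, the zero-extension path would simply mask the
   value down to 12.)  */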
static tree
widen_bitfield (tree val, tree field, tree var)
{
  unsigned HOST_WIDE_INT var_size, field_size;
  tree wide_val;
  unsigned HOST_WIDE_INT mask;
  unsigned int i;

  /* We can only do this if the size of the type and field and VAL are
     all constants representable in HOST_WIDE_INT.  */
  if (!host_integerp (TYPE_SIZE (TREE_TYPE (var)), 1)
      || !host_integerp (DECL_SIZE (field), 1)
      || !host_integerp (val, 0))
    return NULL_TREE;

  var_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1);
  field_size = tree_low_cst (DECL_SIZE (field), 1);

  /* Give up if either the bitfield or the variable are too wide.  */
  if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

  gcc_assert (var_size >= field_size);

  /* If the sign bit of the value is not set or the field's type is unsigned,
     just mask off the high order bits of the value.  */
  if (DECL_UNSIGNED (field)
      || !(tree_low_cst (val, 0) & (((HOST_WIDE_INT)1) << (field_size - 1))))
    {
      /* Zero extension.  Build a mask with the lower 'field_size' bits
	 set and a BIT_AND_EXPR node to clear the high order bits of
	 the value.  */
      for (i = 0, mask = 0; i < field_size; i++)
	mask |= ((HOST_WIDE_INT) 1) << i;

      wide_val = fold_build2 (BIT_AND_EXPR, TREE_TYPE (var), val,
			      build_int_cst (TREE_TYPE (var), mask));
    }
  else
    {
      /* Sign extension.  Create a mask with the upper 'var_size - field_size'
	 bits set and a BIT_IOR_EXPR to set the high order bits of the
	 value.  */
      for (i = 0, mask = 0; i < (var_size - field_size); i++)
	mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);

      wide_val = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (var), val,
			      build_int_cst (TREE_TYPE (var), mask));
    }

  return wide_val;
}
/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
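/* (For instance, with 4-byte 'int' elements, a dereference of
   "&a[0] + 8" can become "a[2]", and "&a[3] + 8" can become "a[5]";
   the arrays and offsets here are illustrative only.)  */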
static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
{
  tree min_idx, idx, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!lang_hooks.types_compatible_p (orig_type, elt_type))
    return NULL_TREE;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = integer_zero_node;
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;

      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 1,
				   TREE_INT_CST_LOW (offset),
				   TREE_INT_CST_HIGH (offset),
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (NULL_TREE, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = integer_zero_node;
  if (TYPE_DOMAIN (array_type))
    {
      if (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
	min_idx = TYPE_MIN_VALUE (TYPE_DOMAIN (array_type));
      else
	min_idx = fold_convert (TYPE_DOMAIN (array_type), min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      idx = fold_convert (TYPE_DOMAIN (array_type), idx);
      elt_offset = fold_convert (TYPE_DOMAIN (array_type), elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  return build (ARRAY_REF, orig_type, base, idx, min_idx,
		size_int (tree_low_cst (elt_size, 1)
			  / (TYPE_ALIGN_UNIT (elt_type))));
}
/* A subroutine of fold_stmt_r.  Attempts to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
/* ??? This doesn't handle class inheritance.  */
static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (lang_hooks.types_compatible_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && lang_hooks.types_compatible_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used as for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      offset = t;
      goto found;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

 found:
  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}
/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a PLUS_EXPR.  */
  if (TREE_CODE (base) == PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
	  && ccp_decl_initial_min_invariant (DECL_INITIAL (base)))
	return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B[X].  */
      t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr));
      if (t)
	return t;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset,
					      TREE_TYPE (expr), false);
      if (t)
	return t;

      /* Fold *&B to B.  We can only do this if EXPR is the same type
	 as BASE.  We can't do this if EXPR is the element type of an array
	 and BASE is the array.  */
      if (integer_zerop (offset)
	  && lang_hooks.types_compatible_p (TREE_TYPE (base),
					    TREE_TYPE (expr)))
	return base;
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we die in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)),
						  base, offset,
						  TREE_TYPE (expr), true);
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
/* A subroutine of fold_stmt_r.  EXPR is a PLUS_EXPR.

   A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where the cast doesn't actually exist, but is implicit in the
   type of the PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */
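/* (For instance, reading the constant as a byte displacement as described
   above, "(int *)(&array + 12)" can become "&array[3]" when the elements
   are 4-byte 'int'; the element size and offset are illustrative only.)  */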
static tree
maybe_fold_stmt_addition (tree expr)
{
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  tree ptr_type = TREE_TYPE (expr);
  tree ptd_type;
  tree t;
  bool subtract = (TREE_CODE (expr) == MINUS_EXPR);

  /* We're only interested in pointer arithmetic.  */
  if (!POINTER_TYPE_P (ptr_type))
    return NULL_TREE;
  /* Canonicalize the integral operand to op1.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
    {
      if (subtract)
	return NULL_TREE;
      t = op0, op0 = op1, op1 = t;
    }
  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      /* If we're manipulating unsigned types, then folding into negative
	 values can produce incorrect results.  Particularly if the type
	 is smaller than the width of the pointer.  */
      if (subtract
	  && TYPE_UNSIGNED (TREE_TYPE (op1))
	  && tree_int_cst_lt (array_idx, op1))
	break;
      op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
			     array_idx, op1, 0);
      subtract = false;
      op0 = array_obj;
    }

  /* If we weren't able to fold the subtraction into another array reference,
     canonicalize the integer for passing to the array and component ref
     simplification functions.  */
  if (subtract)
    {
      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
	return NULL_TREE;
      op1 = fold_build1 (NEGATE_EXPR, TREE_TYPE (op1), op1);
      /* ??? In theory fold should always produce another integer.  */
      if (TREE_CODE (op1) != INTEGER_CST)
	return NULL_TREE;
    }

  ptd_type = TREE_TYPE (ptr_type);

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
					    ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, ptr_type, t);

  return t;
}
/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */
static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  bool *changed_p = data;
  tree expr = *expr_p, t;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
				    integer_zero_node);
      break;

      /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF.
	 We'd only want to bother decomposing an existing ARRAY_REF if
	 the base array is found to have another offset contained within.
	 Otherwise we'd be wasting time.  */

    case ADDR_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Set TREE_INVARIANT properly so that the value is properly
	 considered constant, and so gets propagated as expected.  */
      if (*changed_p)
	recompute_tree_invarant_for_addr_expr (expr);
      return NULL_TREE;

    case PLUS_EXPR:
    case MINUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (expr);
      break;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
	 We've already checked that the records are compatible, so we should
	 come up with a set of compatible fields.  */
      {
	tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
	tree expr_field = TREE_OPERAND (expr, 1);

	if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
	  {
	    expr_field = find_compatible_field (expr_record, expr_field);
	    TREE_OPERAND (expr, 1) = expr_field;
	  }
      }
      break;

    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
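/* (For instance, with TYPE == 0, if ARG is a PHI node merging two pointers
   known to point to "foo" and "bar", both arguments yield the length 3, so
   3 is stored in *LENGTH and true is returned; the strings here are
   illustrative only.)  */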
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, def_stmt, val;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (TREE_CODE (def_stmt))
    {
      case MODIFY_EXPR:
	{
	  tree rhs;

	  /* The RHS of the statement defining VAR must either have a
	     constant length or come from another SSA_NAME with a constant
	     length.  */
	  rhs = TREE_OPERAND (def_stmt, 1);
	  return get_maxval_strlen (rhs, length, visited, type);
	}

      case PHI_NODE:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  int i;

	  for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
	    {
	      tree arg = PHI_ARG_DEF (def_stmt, i);

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == PHI_RESULT (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }

	  return true;
	}

      default:
	break;
    }

  return false;
}
/* Fold builtin call FN in statement STMT.  If it cannot be folded into a
   constant, return NULL_TREE.  Otherwise, return its constant value.  */
static tree
ccp_fold_builtin (tree stmt, tree fn)
{
  tree result, val[3];
  tree callee, arglist, a;
  int arg_mask, i, type;
  bitmap visited;
  bool ignore;

  ignore = TREE_CODE (stmt) != MODIFY_EXPR;

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  callee = get_callee_fndecl (fn);
  arglist = TREE_OPERAND (fn, 1);
  result = fold_builtin (callee, arglist, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  if (!arglist)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_mask = 1;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_mask = 2;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_mask = 4;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_mask = 2;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_mask = 2;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);

  memset (val, 0, sizeof (val));
  for (i = 0, a = arglist;
       arg_mask;
       i++, arg_mask >>= 1, a = TREE_CHAIN (a))
    if (arg_mask & 1)
      {
	bitmap_clear (visited);
	if (!get_maxval_strlen (TREE_VALUE (a), &val[i], visited, type))
	  val[i] = NULL_TREE;
      }

  BITMAP_FREE (visited);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0])
	{
	  tree new = fold_convert (TREE_TYPE (fn), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we can not use the result.  */
	  if (is_gimple_val (new)
	      || (is_gimple_cast (new)
		  && is_gimple_val (TREE_OPERAND (new, 0))))
	    return new;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]))
	result = fold_builtin_strcpy (callee, arglist, val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]))
	result = fold_builtin_strncpy (callee, arglist, val[1]);
      break;

    case BUILT_IN_FPUTS:
      result = fold_builtin_fputs (arglist,
				   TREE_CODE (stmt) != MODIFY_EXPR, 0,
				   val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      result = fold_builtin_fputs (arglist,
				   TREE_CODE (stmt) != MODIFY_EXPR, 1,
				   val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]))
	result = fold_builtin_memory_chk (callee, arglist, val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = fold_builtin_stxcpy_chk (callee, arglist, val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]))
	result = fold_builtin_strncpy_chk (arglist, val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = fold_builtin_snprintf_chk (arglist, val[1],
					    DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Fold the statement pointed to by STMT_P.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */
bool
fold_stmt (tree *stmt_p)
{
  tree rhs, result, stmt;
  bool changed = false;

  stmt = *stmt_p;

  /* If we replaced constants and the statement makes pointer dereferences,
     then we may need to fold instances of *&VAR into VAR, etc.  */
  if (walk_tree (stmt_p, fold_stmt_r, &changed, NULL))
    {
      *stmt_p
	= build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
				    NULL);
      return true;
    }

  rhs = get_rhs (stmt);
  if (!rhs)
    return changed;
  result = NULL_TREE;

  if (TREE_CODE (rhs) == CALL_EXPR)
    {
      tree callee;

      /* Check for builtins that CCP can handle using information not
	 available in the generic fold routines.  */
      callee = get_callee_fndecl (rhs);
      if (callee && DECL_BUILT_IN (callee))
	result = ccp_fold_builtin (stmt, rhs);
      else
	{
	  /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
	     here are when we've propagated the address of a decl into the
	     object slot.  */
	  /* ??? Should perhaps do this in fold proper.  However, doing it
	     there requires that we create a new CALL_EXPR, and that requires
	     copying EH region info to the new node.  Easier to just do it
	     here where we can just smash the call operand.  */
	  callee = TREE_OPERAND (rhs, 0);
	  if (TREE_CODE (callee) == OBJ_TYPE_REF
	      && lang_hooks.fold_obj_type_ref
	      && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
	      && DECL_P (TREE_OPERAND
			 (OBJ_TYPE_REF_OBJECT (callee), 0)))
	    {
	      tree t;

	      /* ??? Caution: Broken ADDR_EXPR semantics means that
		 looking at the type of the operand of the addr_expr
		 can yield an array type.  See silly exception in
		 check_pointer_types_r.  */

	      t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
	      t = lang_hooks.fold_obj_type_ref (callee, t);
	      if (t)
		{
		  TREE_OPERAND (rhs, 0) = t;
		  changed = true;
		}
	    }
	}
    }

  /* If we couldn't fold the RHS, hand over to the generic fold routines.  */
  if (result == NULL_TREE)
    result = fold (rhs);

  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
     may have been added by fold, and "useless" type conversions that might
     now be apparent due to propagation.  */
  STRIP_USELESS_TYPE_CONVERSION (result);

  if (result != rhs)
    changed |= set_rhs (stmt_p, result);

  return changed;
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  */
bool
fold_stmt_inplace (tree stmt)
{
  tree old_stmt = stmt, rhs, new_rhs;
  bool changed = false;

  walk_tree (&stmt, fold_stmt_r, &changed, NULL);
  gcc_assert (stmt == old_stmt);

  rhs = get_rhs (stmt);
  if (!rhs || rhs == stmt)
    return changed;

  new_rhs = fold (rhs);
  STRIP_USELESS_TYPE_CONVERSION (new_rhs);
  if (new_rhs == rhs)
    return changed;

  changed |= set_rhs (&stmt, new_rhs);
  gcc_assert (stmt == old_stmt);

  return changed;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  */

static tree
convert_to_gimple_builtin (block_stmt_iterator *si_p, tree expr)
{
  tree_stmt_iterator ti;
  tree stmt = bsi_stmt (*si_p);
  tree tmp, stmts = NULL;

  push_gimplify_context ();
  tmp = get_initialized_tmp_var (expr, &stmts, NULL);
  pop_gimplify_context (NULL);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&stmts, EXPR_LOCATION (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (ti = tsi_start (stmts); !tsi_end_p (ti); tsi_next (&ti))
    {
      tree new_stmt = tsi_stmt (ti);
      find_new_referenced_vars (tsi_stmt_ptr (ti));
      bsi_insert_before (si_p, new_stmt, BSI_NEW_STMT);
      mark_new_vars_to_rename (bsi_stmt (*si_p));
    }

  return tmp;
}
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */
static void
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      for (i = bsi_start (bb); !bsi_end_p (i); )
	{
	  tree *stmtp = bsi_stmt_ptr (i);
	  tree old_stmt = *stmtp;
	  tree call = get_rhs (*stmtp);
	  tree callee, result;
	  enum built_in_function fcode;

	  if (!call || TREE_CODE (call) != CALL_EXPR)
	    {
	      bsi_next (&i);
	      continue;
	    }
	  callee = get_callee_fndecl (call);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      bsi_next (&i);
	      continue;
	    }

	  fcode = DECL_FUNCTION_CODE (callee);

	  result = ccp_fold_builtin (*stmtp, call);
	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      default:
		bsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_generic_stmt (dump_file, *stmtp, dump_flags);
	    }

	  if (!set_rhs (stmtp, result))
	    {
	      result = convert_to_gimple_builtin (&i, result);
	      if (result)
		{
		  bool ok = set_rhs (stmtp, result);

		  gcc_assert (ok);
		}
	    }
	  mark_new_vars_to_rename (*stmtp);
	  if (maybe_clean_or_replace_eh_stmt (old_stmt, *stmtp)
	      && tree_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_generic_stmt (dump_file, *stmtp, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  call = get_rhs (*stmtp);
	  if (!call || TREE_CODE (call) != CALL_EXPR)
	    {
	      bsi_next (&i);
	      continue;
	    }
	  callee = get_callee_fndecl (call);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    bsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    cleanup_tree_cfg ();
}
struct tree_opt_pass pass_fold_builtins =
{
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa,			/* todo_flags_finish */
  0					/* letter */
};