/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED  ->  This is the default starting value.  V_i
			   has not been processed yet.

	UNDEFINED      ->  V_i is a local variable whose definition
			   has not been processed yet.  Therefore we
			   don't yet know if its value is a constant
			   or not.

	CONSTANT       ->  V_i has been found to hold a constant
			   value C.

	VARYING        ->  V_i cannot take a constant value, or if it
			   does, it is not possible to determine it
			   at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      are simulated again.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

		if (PRED)
		  a_9 = 3;
		else
		  a_10 = 100;
		a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.  */
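/* Editorial illustration (not part of the original pass): a minimal
   sketch of the four-level meet operation described above, written
   over a hypothetical simplified value type rather than prop_value_t.
   The real meet, including the STORE-CCP handling of memory
   references, is ccp_lattice_meet further down in this file.  */
#if 0
enum toy_lattice { TOY_UNDEFINED, TOY_CONSTANT, TOY_VARYING };

struct toy_value
{
  enum toy_lattice lv;
  long cst;			/* Meaningful only when lv == TOY_CONSTANT.  */
};

/* Meet V1 with V2, storing the result in *V1.  */
static void
toy_meet (struct toy_value *v1, const struct toy_value *v2)
{
  if (v2->lv == TOY_UNDEFINED)
    return;			/* any M UNDEFINED = any.  */

  if (v1->lv == TOY_UNDEFINED)
    {
      *v1 = *v2;		/* UNDEFINED M any = any.  */
      return;
    }

  /* VARYING absorbs everything, and two different constants meet to
     VARYING.  Two equal constants meet to themselves, so *V1 is
     already correct in that case.  */
  if (v1->lv == TOY_VARYING || v2->lv == TOY_VARYING || v1->cst != v2->cst)
    v1->lv = TOY_VARYING;
}
#endif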
/* Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider a fragment where every path through a
   function stores into 'a.a' a value equal to the constant B, and the
   function then tests 'a.a != B'.  We should be able to deduce that
   the predicate 'a.a != B' is always false.  To achieve this, we
   associate constant values to the SSA names in the V_MAY_DEF and
   V_MUST_DEF operands for each store.  Additionally, since we also
   glob partial loads/stores with the base symbol, we also keep track
   of the memory reference where the constant value was stored (in the
   MEM_REF field of PROP_VALUE_T).  For instance,

	# a_5 = V_MAY_DEF <a_4>
	a.a = 2;

	# VUSE <a_5>
	x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   To support STORE-CCP, it is necessary to add a new value to the
   constant propagation lattice.  When evaluating a load for a memory
   reference we can no longer assume a value of UNDEFINED if we
   haven't seen a preceding store to the same memory location.
   Consider, for instance, global variables:

	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;
	  # A_5 = PHI (A_4, A_2);
	  return A_5;
	}

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.  Therefore,
   when doing STORE-CCP, we introduce a fifth lattice value
   (UNKNOWN_VAL), which overrides any other value when computing the
   meet operation in PHI nodes.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6.  */
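/* Editorial illustration (not part of the original pass): a minimal C
   example of why the incoming value of a global cannot be treated as
   UNDEFINED during STORE-CCP.  The names are hypothetical.  */
#if 0
int A;				/* May be set by callers of foo.  */

int
foo (int i)
{
  if (i > 10)
    A = 3;
  /* If A's incoming value were assumed UNDEFINED here, the PHI for A
     would optimistically evaluate to 3 and the function would be
     folded to 'return 3;', which is wrong whenever i <= 10 and the
     caller stored something else into A.  UNKNOWN_VAL prevents that:
     UNKNOWN_VAL meets with 3 to UNKNOWN_VAL, so no constant is
     propagated out of the PHI.  */
  return A;
}
#endif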
#include "coretypes.h"
#include "basic-block.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED = 0,
  UNDEFINED,
  UNKNOWN_VAL,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a V_MAY_DEF or V_MUST_DEF), CONST_VAL[I].MEM_REF will
   contain the actual memory reference used to store (i.e., the LHS of
   the assignment doing the store).  */
static prop_value_t *const_val;

/* True if we are also propagating constants in stores and loads.  */
static bool do_store_ccp;
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case UNKNOWN_VAL:
      fprintf (outf, "%sUNKNOWN_VAL", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}
/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- If SSA_NAME_VALUE is set and it is a constant, its value is
      used.

   4- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   5- Variables that are not GIMPLE registers are considered
      UNKNOWN_VAL, which is really a stronger version of UNDEFINED.
      It's used to avoid the short circuit evaluation implied by
      UNDEFINED in ccp_lattice_meet.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };

  if (!do_store_ccp && !is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
	 non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if (SSA_NAME_VALUE (var)
	   && is_gimple_min_invariant (SSA_NAME_VALUE (var)))
    {
      val.lattice_val = CONSTANT;
      val.value = SSA_NAME_VALUE (var);
    }
  else if (TREE_STATIC (sym)
	   && TREE_READONLY (sym)
	   && DECL_INITIAL (sym)
	   && is_gimple_min_invariant (DECL_INITIAL (sym)))
    {
      /* Globals and static variables declared 'const' take their
	 initial value.  */
      val.lattice_val = CONSTANT;
      val.value = DECL_INITIAL (sym);
    }
  else
    {
      tree stmt = SSA_NAME_DEF_STMT (var);

      if (IS_EMPTY_STMT (stmt))
	{
	  /* Variables defined by an empty statement are those used
	     before being initialized.  If VAR is a local variable, we
	     can assume initially that it is UNDEFINED.  If we are
	     doing STORE-CCP, function arguments and non-register
	     variables are initially UNKNOWN_VAL, because we cannot
	     discard the value incoming from outside of this function
	     (see ccp_lattice_meet for details).  */
	  if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	    val.lattice_val = UNDEFINED;
	  else if (do_store_ccp)
	    val.lattice_val = UNKNOWN_VAL;
	  else
	    val.lattice_val = VARYING;
	}
      else if (TREE_CODE (stmt) == MODIFY_EXPR
	       || TREE_CODE (stmt) == PHI_NODE)
	{
	  /* Any other variable defined by an assignment or a PHI node
	     is considered UNDEFINED (or UNKNOWN_VAL if VAR is not a
	     GIMPLE register).  */
	  val.lattice_val = is_gimple_reg (sym) ? UNDEFINED : UNKNOWN_VAL;
	}
      else
	{
	  /* Otherwise, VAR will never take on a constant value.  */
	  val.lattice_val = VARYING;
	}
    }

  return val;
}
/* Get the constant value associated with variable VAR.  If
   MAY_USE_DEFAULT_P is true, call get_default_value on variables that
   have the lattice value UNINITIALIZED.  */

static prop_value_t *
get_value (tree var, bool may_use_default_p)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
  if (may_use_default_p && val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var, false);

  /* Lattice transitions must always be monotonically increasing in
     value.  We allow two exceptions:

     1- If *OLD_VAL and NEW_VAL are the same, return false to
	inform the caller that this was a non-transition.

     2- If we are doing store-ccp (i.e., DOING_STORE_CCP is true),
	allow CONSTANT->UNKNOWN_VAL.  The UNKNOWN_VAL state is a
	special type of UNDEFINED state which prevents the short
	circuit evaluation of PHI arguments (see ccp_visit_phi_node
	and ccp_lattice_meet).  */
  gcc_assert (old_val->lattice_val <= new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && old_val->value == new_val.value
		  && old_val->mem_ref == new_val.mem_ref)
	      || (do_store_ccp
		  && old_val->lattice_val == CONSTANT
		  && new_val.lattice_val == UNKNOWN_VAL));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  %sdding SSA edges to worklist.\n",
		   new_val.lattice_val != UNDEFINED ? "A" : "Not a");
	}

      *old_val = new_val;

      /* Transitions UNINITIALIZED -> UNDEFINED are never interesting
	 for propagation purposes.  In these cases return false to
	 avoid doing useless work.  */
      return (new_val.lattice_val != UNDEFINED);
    }

  return false;
}
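/* Editorial illustration (not part of the original pass): the
   monotonicity check in set_lattice_value works because the lattice
   values are declared in increasing order, so "going up the lattice"
   is a plain integer comparison.  A hedged sketch with a hypothetical
   helper name; the ordering assumed is UNINITIALIZED < UNDEFINED <
   UNKNOWN_VAL < CONSTANT < VARYING, mirroring the comparisons above.  */
#if 0
static bool
valid_transition_p (ccp_lattice_t from, ccp_lattice_t to)
{
  /* Values may only move up the lattice (or stay put).  The one
     downward transition tolerated by set_lattice_value,
     CONSTANT -> UNKNOWN_VAL, only happens during STORE-CCP.  */
  return to >= from || (from == CONSTANT && to == UNKNOWN_VAL);
}
#endif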
430 /* Return the likely CCP lattice value for STMT.
432 If STMT has no operands, then return CONSTANT.
434 Else if any operands of STMT are undefined, then return UNDEFINED.
436 Else if any operands of STMT are constants, then return CONSTANT.
438 Else return VARYING. */
441 likely_value (tree stmt
)
448 ann
= stmt_ann (stmt
);
450 /* If the statement has volatile operands, it won't fold to a
452 if (ann
->has_volatile_ops
)
455 /* If we are not doing store-ccp, statements with loads
456 and/or stores will never fold into a constant. */
458 && (ann
->makes_aliased_stores
459 || ann
->makes_aliased_loads
460 || !ZERO_SSA_OPERANDS (stmt
, SSA_OP_ALL_VIRTUALS
)))
464 /* A CALL_EXPR is assumed to be varying. NOTE: This may be overly
465 conservative, in the presence of const and pure calls. */
466 if (get_call_expr_in (stmt
) != NULL_TREE
)
469 /* Anything other than assignments and conditional jumps are not
470 interesting for CCP. */
471 if (TREE_CODE (stmt
) != MODIFY_EXPR
472 && TREE_CODE (stmt
) != COND_EXPR
473 && TREE_CODE (stmt
) != SWITCH_EXPR
)
476 if (is_gimple_min_invariant (get_rhs (stmt
)))
479 found_constant
= false;
480 FOR_EACH_SSA_TREE_OPERAND (use
, stmt
, iter
, SSA_OP_USE
|SSA_OP_VUSE
)
482 prop_value_t
*val
= get_value (use
, true);
484 if (val
->lattice_val
== VARYING
)
487 if (val
->lattice_val
== UNKNOWN_VAL
)
489 /* UNKNOWN_VAL is invalid when not doing STORE-CCP. */
490 gcc_assert (do_store_ccp
);
494 if (val
->lattice_val
== CONSTANT
)
495 found_constant
= true;
499 || ZERO_SSA_OPERANDS (stmt
, SSA_OP_USE
)
500 || ZERO_SSA_OPERANDS (stmt
, SSA_OP_VUSE
))
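/* Editorial illustration (not part of the original pass): the gist of
   the classification documented above likely_value, written over a
   hypothetical summary of the operand lattice states instead of the
   real operand iterators.  It deliberately omits the early VARYING
   exits for volatile operands, stores/loads outside STORE-CCP, and
   calls.  */
#if 0
struct operand_summary
{
  bool no_operands;		/* The statement has no USE/VUSE operands.  */
  bool any_undefined;		/* At least one operand is UNDEFINED.  */
  bool any_constant;		/* At least one operand is CONSTANT.  */
};

static ccp_lattice_t
toy_likely_value (const struct operand_summary *s)
{
  if (s->no_operands)
    return CONSTANT;
  if (s->any_undefined)
    return UNDEFINED;
  if (s->any_constant)
    return CONSTANT;
  return VARYING;
}
#endif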
507 /* Initialize local data structures for CCP. */
510 ccp_initialize (void)
514 const_val
= xmalloc (num_ssa_names
* sizeof (*const_val
));
515 memset (const_val
, 0, num_ssa_names
* sizeof (*const_val
));
517 /* Initialize simulation flags for PHI nodes and statements. */
520 block_stmt_iterator i
;
522 for (i
= bsi_start (bb
); !bsi_end_p (i
); bsi_next (&i
))
524 bool is_varying
= false;
525 tree stmt
= bsi_stmt (i
);
527 if (likely_value (stmt
) == VARYING
)
533 /* If the statement will not produce a constant, mark
534 all its outputs VARYING. */
535 FOR_EACH_SSA_TREE_OPERAND (def
, stmt
, iter
, SSA_OP_ALL_DEFS
)
536 get_value (def
, false)->lattice_val
= VARYING
;
538 /* Never mark conditional jumps with DONT_SIMULATE_AGAIN,
539 otherwise the propagator will never add the outgoing
541 if (TREE_CODE (stmt
) != COND_EXPR
542 && TREE_CODE (stmt
) != SWITCH_EXPR
)
546 DONT_SIMULATE_AGAIN (stmt
) = is_varying
;
550 /* Now process PHI nodes. */
555 for (phi
= phi_nodes (bb
); phi
; phi
= PHI_CHAIN (phi
))
559 prop_value_t
*val
= get_value (PHI_RESULT (phi
), false);
561 for (i
= 0; i
< PHI_NUM_ARGS (phi
); i
++)
563 arg
= PHI_ARG_DEF (phi
, i
);
565 if (TREE_CODE (arg
) == SSA_NAME
566 && get_value (arg
, false)->lattice_val
== VARYING
)
568 val
->lattice_val
= VARYING
;
573 DONT_SIMULATE_AGAIN (phi
) = (val
->lattice_val
== VARYING
);
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.  */

static void
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.  The meet is computed with the following rules:

		any M UNDEFINED   = any
		any M UNKNOWN_VAL = UNKNOWN_VAL
		any M VARYING     = VARYING
		Ci  M Cj          = Ci		if (i == j)
		Ci  M Cj          = VARYING	if (i != j)

   Lattice values UNKNOWN_VAL and UNDEFINED are similar but have
   different semantics at PHI nodes.  Both values imply that we don't
   know whether the variable is constant or not.  However, UNKNOWN_VAL
   values override all others.  For instance, suppose that A is a
   global variable, one PHI argument (A_1) comes from a block that
   stores the constant 4 into A, and the other argument (A_2) comes
   straight from A's default definition:

	A_3 = PHI (A_2, A_1)

   If the edge into A_2 is not executable, the first visit to A_3 will
   yield the constant 4.  But the second visit to A_3 will be with A_2
   in state UNKNOWN_VAL.  We can no longer conclude that A_3 is 4
   because A_2 may have been set in another function.  If we had used
   the lattice value UNDEFINED, we would have wrongly concluded that
   A_3 is 4.  */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any.  */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any.
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == UNKNOWN_VAL
	   || val2->lattice_val == UNKNOWN_VAL)
    {
      /* UNKNOWN_VAL values are invalid if we are not doing STORE-CCP.  */
      gcc_assert (do_store_ccp);

      /* any M UNKNOWN_VAL = UNKNOWN_VAL.  */
      val1->lattice_val = UNKNOWN_VAL;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1
	   && (!do_store_ccp
	       || (val1->mem_ref && val2->mem_ref
		   && operand_equal_p (val1->mem_ref, val2->mem_ref, 0))))
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      val1->value = val1->value;
      val1->mem_ref = val1->mem_ref;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
}
686 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
687 lattice values to determine PHI_NODE's lattice value. The value of a
688 PHI node is determined calling ccp_lattice_meet with all the arguments
689 of the PHI node that are incoming via executable edges. */
691 static enum ssa_prop_result
692 ccp_visit_phi_node (tree phi
)
695 prop_value_t
*old_val
, new_val
;
697 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
699 fprintf (dump_file
, "\nVisiting PHI node: ");
700 print_generic_expr (dump_file
, phi
, dump_flags
);
703 old_val
= get_value (PHI_RESULT (phi
), false);
704 switch (old_val
->lattice_val
)
707 return SSA_PROP_VARYING
;
714 /* To avoid the default value of UNKNOWN_VAL overriding
715 that of its possible constant arguments, temporarily
716 set the PHI node's default lattice value to be
717 UNDEFINED. If the PHI node's old value was UNKNOWN_VAL and
718 the new value is UNDEFINED, then we prevent the invalid
719 transition by not calling set_lattice_value. */
720 gcc_assert (do_store_ccp
);
726 new_val
.lattice_val
= UNDEFINED
;
727 new_val
.value
= NULL_TREE
;
728 new_val
.mem_ref
= NULL_TREE
;
735 for (i
= 0; i
< PHI_NUM_ARGS (phi
); i
++)
737 /* Compute the meet operator over all the PHI arguments flowing
738 through executable edges. */
739 edge e
= PHI_ARG_EDGE (phi
, i
);
741 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
744 "\n Argument #%d (%d -> %d %sexecutable)\n",
745 i
, e
->src
->index
, e
->dest
->index
,
746 (e
->flags
& EDGE_EXECUTABLE
) ? "" : "not ");
749 /* If the incoming edge is executable, Compute the meet operator for
750 the existing value of the PHI node and the current PHI argument. */
751 if (e
->flags
& EDGE_EXECUTABLE
)
753 tree arg
= PHI_ARG_DEF (phi
, i
);
754 prop_value_t arg_val
;
756 if (is_gimple_min_invariant (arg
))
758 arg_val
.lattice_val
= CONSTANT
;
760 arg_val
.mem_ref
= NULL_TREE
;
763 arg_val
= *(get_value (arg
, true));
765 ccp_lattice_meet (&new_val
, &arg_val
);
767 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
769 fprintf (dump_file
, "\t");
770 print_generic_expr (dump_file
, arg
, dump_flags
);
771 dump_lattice_value (dump_file
, "\tValue: ", arg_val
);
772 fprintf (dump_file
, "\n");
775 if (new_val
.lattice_val
== VARYING
)
780 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
782 dump_lattice_value (dump_file
, "\n PHI node value: ", new_val
);
783 fprintf (dump_file
, "\n\n");
786 /* Check for an invalid change from UNKNOWN_VAL to UNDEFINED. */
788 && old_val
->lattice_val
== UNKNOWN_VAL
789 && new_val
.lattice_val
== UNDEFINED
)
790 return SSA_PROP_NOT_INTERESTING
;
792 /* Otherwise, make the transition to the new value. */
793 if (set_lattice_value (PHI_RESULT (phi
), new_val
))
795 if (new_val
.lattice_val
== VARYING
)
796 return SSA_PROP_VARYING
;
798 return SSA_PROP_INTERESTING
;
801 return SSA_PROP_NOT_INTERESTING
;
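/* Editorial illustration (not part of the original pass): the core of
   the PHI visit above, reduced to "meet the arguments that flow in
   through executable edges".  It reuses the toy types from the
   earlier meet sketch; the real code uses PHI_ARG_EDGE and the
   EDGE_EXECUTABLE flag.  */
#if 0
struct toy_phi_arg
{
  bool edge_executable;		/* Was the incoming edge marked executable?  */
  struct toy_value value;	/* Lattice value of the argument.  */
};

static struct toy_value
toy_visit_phi (const struct toy_phi_arg *args, int nargs)
{
  /* Start optimistically: UNDEFINED is the identity of the meet.  */
  struct toy_value result = { TOY_UNDEFINED, 0 };
  int i;

  for (i = 0; i < nargs; i++)
    if (args[i].edge_executable)
      toy_meet (&result, &args[i].value);

  return result;
}
#endif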
805 /* CCP specific front-end to the non-destructive constant folding
808 Attempt to simplify the RHS of STMT knowing that one or more
809 operands are constants.
811 If simplification is possible, return the simplified RHS,
812 otherwise return the original RHS. */
817 tree rhs
= get_rhs (stmt
);
818 enum tree_code code
= TREE_CODE (rhs
);
819 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
820 tree retval
= NULL_TREE
;
822 if (TREE_CODE (rhs
) == SSA_NAME
)
824 /* If the RHS is an SSA_NAME, return its known constant value,
826 return get_value (rhs
, true)->value
;
828 else if (do_store_ccp
&& stmt_makes_single_load (stmt
))
830 /* If the RHS is a memory load, see if the VUSEs associated with
831 it are a valid constant for that memory load. */
832 prop_value_t
*val
= get_value_loaded_by (stmt
, const_val
);
833 if (val
&& val
->mem_ref
834 && operand_equal_p (val
->mem_ref
, rhs
, 0))
840 /* Unary operators. Note that we know the single operand must
841 be a constant. So this should almost always return a
843 if (kind
== tcc_unary
)
845 /* Handle unary operators which can appear in GIMPLE form. */
846 tree op0
= TREE_OPERAND (rhs
, 0);
848 /* Simplify the operand down to a constant. */
849 if (TREE_CODE (op0
) == SSA_NAME
)
851 prop_value_t
*val
= get_value (op0
, true);
852 if (val
->lattice_val
== CONSTANT
)
853 op0
= get_value (op0
, true)->value
;
856 return fold_unary (code
, TREE_TYPE (rhs
), op0
);
859 /* Binary and comparison operators. We know one or both of the
860 operands are constants. */
861 else if (kind
== tcc_binary
862 || kind
== tcc_comparison
863 || code
== TRUTH_AND_EXPR
864 || code
== TRUTH_OR_EXPR
865 || code
== TRUTH_XOR_EXPR
)
867 /* Handle binary and comparison operators that can appear in
869 tree op0
= TREE_OPERAND (rhs
, 0);
870 tree op1
= TREE_OPERAND (rhs
, 1);
872 /* Simplify the operands down to constants when appropriate. */
873 if (TREE_CODE (op0
) == SSA_NAME
)
875 prop_value_t
*val
= get_value (op0
, true);
876 if (val
->lattice_val
== CONSTANT
)
880 if (TREE_CODE (op1
) == SSA_NAME
)
882 prop_value_t
*val
= get_value (op1
, true);
883 if (val
->lattice_val
== CONSTANT
)
887 return fold_binary (code
, TREE_TYPE (rhs
), op0
, op1
);
890 /* We may be able to fold away calls to builtin functions if their
891 arguments are constants. */
892 else if (code
== CALL_EXPR
893 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == ADDR_EXPR
894 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs
, 0), 0))
896 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs
, 0), 0)))
898 if (!ZERO_SSA_OPERANDS (stmt
, SSA_OP_USE
))
901 tree fndecl
, arglist
;
906 /* Preserve the original values of every operand. */
907 orig
= xmalloc (sizeof (tree
) * NUM_SSA_OPERANDS (stmt
, SSA_OP_USE
));
908 FOR_EACH_SSA_TREE_OPERAND (var
, stmt
, iter
, SSA_OP_USE
)
911 /* Substitute operands with their values and try to fold. */
912 replace_uses_in (stmt
, NULL
, const_val
);
913 fndecl
= get_callee_fndecl (rhs
);
914 arglist
= TREE_OPERAND (rhs
, 1);
915 retval
= fold_builtin (fndecl
, arglist
, false);
917 /* Restore operands to their original form. */
919 FOR_EACH_SSA_USE_OPERAND (var_p
, stmt
, iter
, SSA_OP_USE
)
920 SET_USE (var_p
, orig
[i
++]);
927 /* If we got a simplified form, see if we need to convert its type. */
929 return fold_convert (TREE_TYPE (rhs
), retval
);
931 /* No simplification was possible. */
936 /* Return the tree representing the element referenced by T if T is an
937 ARRAY_REF or COMPONENT_REF into constant aggregates. Return
938 NULL_TREE otherwise. */
941 fold_const_aggregate_ref (tree t
)
944 tree base
, ctor
, idx
, field
;
945 unsigned HOST_WIDE_INT cnt
;
948 switch (TREE_CODE (t
))
951 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
952 DECL_INITIAL. If BASE is a nested reference into another
953 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
954 the inner reference. */
955 base
= TREE_OPERAND (t
, 0);
956 switch (TREE_CODE (base
))
959 if (!TREE_READONLY (base
)
960 || TREE_CODE (TREE_TYPE (base
)) != ARRAY_TYPE
961 || !targetm
.binds_local_p (base
))
964 ctor
= DECL_INITIAL (base
);
969 ctor
= fold_const_aggregate_ref (base
);
976 if (ctor
== NULL_TREE
977 || TREE_CODE (ctor
) != CONSTRUCTOR
978 || !TREE_STATIC (ctor
))
981 /* Get the index. If we have an SSA_NAME, try to resolve it
982 with the current lattice value for the SSA_NAME. */
983 idx
= TREE_OPERAND (t
, 1);
984 switch (TREE_CODE (idx
))
987 if ((value
= get_value (idx
, true))
988 && value
->lattice_val
== CONSTANT
989 && TREE_CODE (value
->value
) == INTEGER_CST
)
1002 /* Whoo-hoo! I'll fold ya baby. Yeah! */
1003 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
1004 if (tree_int_cst_equal (cfield
, idx
))
1009 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1010 DECL_INITIAL. If BASE is a nested reference into another
1011 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1012 the inner reference. */
1013 base
= TREE_OPERAND (t
, 0);
1014 switch (TREE_CODE (base
))
1017 if (!TREE_READONLY (base
)
1018 || TREE_CODE (TREE_TYPE (base
)) != RECORD_TYPE
1019 || !targetm
.binds_local_p (base
))
1022 ctor
= DECL_INITIAL (base
);
1027 ctor
= fold_const_aggregate_ref (base
);
1034 if (ctor
== NULL_TREE
1035 || TREE_CODE (ctor
) != CONSTRUCTOR
1036 || !TREE_STATIC (ctor
))
1039 field
= TREE_OPERAND (t
, 1);
1041 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
1043 /* FIXME: Handle bit-fields. */
1044 && ! DECL_BIT_FIELD (cfield
))
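/* Editorial illustration (not part of the original pass): the kind of
   source-level construct fold_const_aggregate_ref resolves.  The
   aggregate must be a read-only, locally-bound static with a constant
   initializer; the names below are hypothetical.  */
#if 0
static const int primes[4] = { 2, 3, 5, 7 };

int
third_prime (void)
{
  /* The ARRAY_REF primes[2] indexes a TREE_STATIC, TREE_READONLY
     CONSTRUCTOR with a constant index, so the load can be folded to
     'return 5;'.  */
  return primes[2];
}
#endif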
1055 /* Evaluate statement STMT. */
1058 evaluate_stmt (tree stmt
)
1062 ccp_lattice_t likelyvalue
= likely_value (stmt
);
1064 val
.mem_ref
= NULL_TREE
;
1066 /* If the statement is likely to have a CONSTANT result, then try
1067 to fold the statement to determine the constant value. */
1068 if (likelyvalue
== CONSTANT
)
1069 simplified
= ccp_fold (stmt
);
1070 /* If the statement is likely to have a VARYING result, then do not
1071 bother folding the statement. */
1072 else if (likelyvalue
== VARYING
)
1073 simplified
= get_rhs (stmt
);
1074 /* If the statement is an ARRAY_REF or COMPONENT_REF into constant
1075 aggregates, extract the referenced constant. Otherwise the
1076 statement is likely to have an UNDEFINED value, and there will be
1077 nothing to do. Note that fold_const_aggregate_ref returns
1078 NULL_TREE if the first case does not match. */
1080 simplified
= fold_const_aggregate_ref (get_rhs (stmt
));
1082 if (simplified
&& is_gimple_min_invariant (simplified
))
1084 /* The statement produced a constant value. */
1085 val
.lattice_val
= CONSTANT
;
1086 val
.value
= simplified
;
1090 /* The statement produced a nonconstant value. If the statement
1091 had UNDEFINED operands, then the result of the statement
1092 should be UNDEFINED. Otherwise, the statement is VARYING. */
1093 if (likelyvalue
== UNDEFINED
|| likelyvalue
== UNKNOWN_VAL
)
1094 val
.lattice_val
= likelyvalue
;
1096 val
.lattice_val
= VARYING
;
1098 val
.value
= NULL_TREE
;
1105 /* Visit the assignment statement STMT. Set the value of its LHS to the
1106 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1107 creates virtual definitions, set the value of each new name to that
1108 of the RHS (if we can derive a constant out of the RHS). */
1110 static enum ssa_prop_result
1111 visit_assignment (tree stmt
, tree
*output_p
)
1115 enum ssa_prop_result retval
;
1117 lhs
= TREE_OPERAND (stmt
, 0);
1118 rhs
= TREE_OPERAND (stmt
, 1);
1120 if (TREE_CODE (rhs
) == SSA_NAME
)
1122 /* For a simple copy operation, we copy the lattice values. */
1123 prop_value_t
*nval
= get_value (rhs
, true);
1126 else if (do_store_ccp
&& stmt_makes_single_load (stmt
))
1128 /* Same as above, but the RHS is not a gimple register and yet
1129 has a known VUSE. If STMT is loading from the same memory
1130 location that created the SSA_NAMEs for the virtual operands,
1131 we can propagate the value on the RHS. */
1132 prop_value_t
*nval
= get_value_loaded_by (stmt
, const_val
);
1134 if (nval
&& nval
->mem_ref
1135 && operand_equal_p (nval
->mem_ref
, rhs
, 0))
1138 val
= evaluate_stmt (stmt
);
1141 /* Evaluate the statement. */
1142 val
= evaluate_stmt (stmt
);
1144 /* If the original LHS was a VIEW_CONVERT_EXPR, modify the constant
1145 value to be a VIEW_CONVERT_EXPR of the old constant value.
1147 ??? Also, if this was a definition of a bitfield, we need to widen
1148 the constant value into the type of the destination variable. This
1149 should not be necessary if GCC represented bitfields properly. */
1151 tree orig_lhs
= TREE_OPERAND (stmt
, 0);
1153 if (TREE_CODE (orig_lhs
) == VIEW_CONVERT_EXPR
1154 && val
.lattice_val
== CONSTANT
)
1156 tree w
= fold_build1 (VIEW_CONVERT_EXPR
,
1157 TREE_TYPE (TREE_OPERAND (orig_lhs
, 0)),
1160 orig_lhs
= TREE_OPERAND (orig_lhs
, 0);
1161 if (w
&& is_gimple_min_invariant (w
))
1165 val
.lattice_val
= VARYING
;
1170 if (val
.lattice_val
== CONSTANT
1171 && TREE_CODE (orig_lhs
) == COMPONENT_REF
1172 && DECL_BIT_FIELD (TREE_OPERAND (orig_lhs
, 1)))
1174 tree w
= widen_bitfield (val
.value
, TREE_OPERAND (orig_lhs
, 1),
1177 if (w
&& is_gimple_min_invariant (w
))
1181 val
.lattice_val
= VARYING
;
1182 val
.value
= NULL_TREE
;
1183 val
.mem_ref
= NULL_TREE
;
1188 retval
= SSA_PROP_NOT_INTERESTING
;
1190 /* Set the lattice value of the statement's output. */
1191 if (TREE_CODE (lhs
) == SSA_NAME
)
1193 /* If STMT is an assignment to an SSA_NAME, we only have one
1195 if (set_lattice_value (lhs
, val
))
1198 if (val
.lattice_val
== VARYING
)
1199 retval
= SSA_PROP_VARYING
;
1201 retval
= SSA_PROP_INTERESTING
;
1204 else if (do_store_ccp
&& stmt_makes_single_store (stmt
))
1206 /* Otherwise, set the names in V_MAY_DEF/V_MUST_DEF operands
1207 to the new constant value and mark the LHS as the memory
1208 reference associated with VAL. */
1213 /* Stores cannot take on an UNDEFINED value. */
1214 if (val
.lattice_val
== UNDEFINED
)
1215 val
.lattice_val
= UNKNOWN_VAL
;
1217 /* Mark VAL as stored in the LHS of this assignment. */
1220 /* Set the value of every VDEF to VAL. */
1222 FOR_EACH_SSA_TREE_OPERAND (vdef
, stmt
, i
, SSA_OP_VIRTUAL_DEFS
)
1223 changed
|= set_lattice_value (vdef
, val
);
1225 /* Note that for propagation purposes, we are only interested in
1226 visiting statements that load the exact same memory reference
1227 stored here. Those statements will have the exact same list
1228 of virtual uses, so it is enough to set the output of this
1229 statement to be its first virtual definition. */
1230 *output_p
= first_vdef (stmt
);
1233 if (val
.lattice_val
== VARYING
)
1234 retval
= SSA_PROP_VARYING
;
1236 retval
= SSA_PROP_INTERESTING
;
1244 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1245 if it can determine which edge will be taken. Otherwise, return
1246 SSA_PROP_VARYING. */
1248 static enum ssa_prop_result
1249 visit_cond_stmt (tree stmt
, edge
*taken_edge_p
)
1254 block
= bb_for_stmt (stmt
);
1255 val
= evaluate_stmt (stmt
);
1257 /* Find which edge out of the conditional block will be taken and add it
1258 to the worklist. If no single edge can be determined statically,
1259 return SSA_PROP_VARYING to feed all the outgoing edges to the
1260 propagation engine. */
1261 *taken_edge_p
= val
.value
? find_taken_edge (block
, val
.value
) : 0;
1263 return SSA_PROP_INTERESTING
;
1265 return SSA_PROP_VARYING
;
1269 /* Evaluate statement STMT. If the statement produces an output value and
1270 its evaluation changes the lattice value of its output, return
1271 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1274 If STMT is a conditional branch and we can determine its truth
1275 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1276 value, return SSA_PROP_VARYING. */
1278 static enum ssa_prop_result
1279 ccp_visit_stmt (tree stmt
, edge
*taken_edge_p
, tree
*output_p
)
1284 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1286 fprintf (dump_file
, "\nVisiting statement:\n");
1287 print_generic_stmt (dump_file
, stmt
, dump_flags
);
1288 fprintf (dump_file
, "\n");
1291 if (TREE_CODE (stmt
) == MODIFY_EXPR
)
1293 /* If the statement is an assignment that produces a single
1294 output value, evaluate its RHS to see if the lattice value of
1295 its output has changed. */
1296 return visit_assignment (stmt
, output_p
);
1298 else if (TREE_CODE (stmt
) == COND_EXPR
|| TREE_CODE (stmt
) == SWITCH_EXPR
)
1300 /* If STMT is a conditional branch, see if we can determine
1301 which branch will be taken. */
1302 return visit_cond_stmt (stmt
, taken_edge_p
);
1305 /* Any other kind of statement is not interesting for constant
1306 propagation and, therefore, not worth simulating. */
1307 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1308 fprintf (dump_file
, "No interesting values produced. Marked VARYING.\n");
1310 /* Definitions made by statements other than assignments to
1311 SSA_NAMEs represent unknown modifications to their outputs.
1312 Mark them VARYING. */
1313 FOR_EACH_SSA_TREE_OPERAND (def
, stmt
, iter
, SSA_OP_ALL_DEFS
)
1315 prop_value_t v
= { VARYING
, NULL_TREE
, NULL_TREE
};
1316 set_lattice_value (def
, v
);
1319 return SSA_PROP_VARYING
;
1323 /* Main entry point for SSA Conditional Constant Propagation. */
1326 execute_ssa_ccp (bool store_ccp
)
1328 do_store_ccp
= store_ccp
;
1330 ssa_propagate (ccp_visit_stmt
, ccp_visit_phi_node
);
1338 execute_ssa_ccp (false);
1345 return flag_tree_ccp
!= 0;
1349 struct tree_opt_pass pass_ccp
=
1352 gate_ccp
, /* gate */
1353 do_ssa_ccp
, /* execute */
1356 0, /* static_pass_number */
1357 TV_TREE_CCP
, /* tv_id */
1358 PROP_cfg
| PROP_ssa
| PROP_alias
, /* properties_required */
1359 0, /* properties_provided */
1360 0, /* properties_destroyed */
1361 0, /* todo_flags_start */
1362 TODO_cleanup_cfg
| TODO_dump_func
| TODO_update_ssa
1363 | TODO_ggc_collect
| TODO_verify_ssa
1364 | TODO_verify_stmts
, /* todo_flags_finish */
1370 do_ssa_store_ccp (void)
1372 /* If STORE-CCP is not enabled, we just run regular CCP. */
1373 execute_ssa_ccp (flag_tree_store_ccp
!= 0);
1377 gate_store_ccp (void)
1379 /* STORE-CCP is enabled only with -ftree-store-ccp, but when
1380 -fno-tree-store-ccp is specified, we should run regular CCP.
1381 That's why the pass is enabled with either flag. */
1382 return flag_tree_store_ccp
!= 0 || flag_tree_ccp
!= 0;
1386 struct tree_opt_pass pass_store_ccp
=
1388 "store_ccp", /* name */
1389 gate_store_ccp
, /* gate */
1390 do_ssa_store_ccp
, /* execute */
1393 0, /* static_pass_number */
1394 TV_TREE_STORE_CCP
, /* tv_id */
1395 PROP_cfg
| PROP_ssa
| PROP_alias
, /* properties_required */
1396 0, /* properties_provided */
1397 0, /* properties_destroyed */
1398 0, /* todo_flags_start */
1399 TODO_dump_func
| TODO_update_ssa
1400 | TODO_ggc_collect
| TODO_verify_ssa
1402 | TODO_verify_stmts
, /* todo_flags_finish */
1406 /* Given a constant value VAL for bitfield FIELD, and a destination
1407 variable VAR, return VAL appropriately widened to fit into VAR. If
1408 FIELD is wider than HOST_WIDE_INT, NULL is returned. */
1411 widen_bitfield (tree val
, tree field
, tree var
)
1413 unsigned HOST_WIDE_INT var_size
, field_size
;
1415 unsigned HOST_WIDE_INT mask
;
1418 /* We can only do this if the size of the type and field and VAL are
1419 all constants representable in HOST_WIDE_INT. */
1420 if (!host_integerp (TYPE_SIZE (TREE_TYPE (var
)), 1)
1421 || !host_integerp (DECL_SIZE (field
), 1)
1422 || !host_integerp (val
, 0))
1425 var_size
= tree_low_cst (TYPE_SIZE (TREE_TYPE (var
)), 1);
1426 field_size
= tree_low_cst (DECL_SIZE (field
), 1);
1428 /* Give up if either the bitfield or the variable are too wide. */
1429 if (field_size
> HOST_BITS_PER_WIDE_INT
|| var_size
> HOST_BITS_PER_WIDE_INT
)
1432 gcc_assert (var_size
>= field_size
);
1434 /* If the sign bit of the value is not set or the field's type is unsigned,
1435 just mask off the high order bits of the value. */
1436 if (DECL_UNSIGNED (field
)
1437 || !(tree_low_cst (val
, 0) & (((HOST_WIDE_INT
)1) << (field_size
- 1))))
1439 /* Zero extension. Build a mask with the lower 'field_size' bits
1440 set and a BIT_AND_EXPR node to clear the high order bits of
1442 for (i
= 0, mask
= 0; i
< field_size
; i
++)
1443 mask
|= ((HOST_WIDE_INT
) 1) << i
;
1445 wide_val
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (var
), val
,
1446 build_int_cst (TREE_TYPE (var
), mask
));
1450 /* Sign extension. Create a mask with the upper 'field_size'
1451 bits set and a BIT_IOR_EXPR to set the high order bits of the
1453 for (i
= 0, mask
= 0; i
< (var_size
- field_size
); i
++)
1454 mask
|= ((HOST_WIDE_INT
) 1) << (var_size
- i
- 1);
1456 wide_val
= fold_build2 (BIT_IOR_EXPR
, TREE_TYPE (var
), val
,
1457 build_int_cst (TREE_TYPE (var
), mask
));
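/* Editorial illustration (not part of the original pass): what
   widen_bitfield computes, written out for a hypothetical 5-bit
   signed field being widened into a 32-bit 'int' variable.  */
#if 0
int
widen_5bit_field (int raw)	/* RAW holds the 5-bit field value.  */
{
  const int field_size = 5;

  if (!(raw & (1 << (field_size - 1))))
    /* Sign bit clear (or field unsigned): zero-extend by masking off
       everything above the low FIELD_SIZE bits.  */
    return raw & ((1 << field_size) - 1);
  else
    /* Sign bit set: sign-extend by OR-ing in all the bits above the
       field, i.e. the high 32 - FIELD_SIZE bits.  */
    return raw | ~((1 << field_size) - 1);
}
#endif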
1464 /* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
1465 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
1466 is the desired result type. */
1469 maybe_fold_offset_to_array_ref (tree base
, tree offset
, tree orig_type
)
1471 tree min_idx
, idx
, elt_offset
= integer_zero_node
;
1472 tree array_type
, elt_type
, elt_size
;
1474 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
1475 measured in units of the size of elements type) from that ARRAY_REF).
1476 We can't do anything if either is variable.
1478 The case we handle here is *(&A[N]+O). */
1479 if (TREE_CODE (base
) == ARRAY_REF
)
1481 tree low_bound
= array_ref_low_bound (base
);
1483 elt_offset
= TREE_OPERAND (base
, 1);
1484 if (TREE_CODE (low_bound
) != INTEGER_CST
1485 || TREE_CODE (elt_offset
) != INTEGER_CST
)
1488 elt_offset
= int_const_binop (MINUS_EXPR
, elt_offset
, low_bound
, 0);
1489 base
= TREE_OPERAND (base
, 0);
1492 /* Ignore stupid user tricks of indexing non-array variables. */
1493 array_type
= TREE_TYPE (base
);
1494 if (TREE_CODE (array_type
) != ARRAY_TYPE
)
1496 elt_type
= TREE_TYPE (array_type
);
1497 if (!lang_hooks
.types_compatible_p (orig_type
, elt_type
))
1500 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1501 element type (so we can use the alignment if it's not constant).
1502 Otherwise, compute the offset as an index by using a division. If the
1503 division isn't exact, then don't do anything. */
1504 elt_size
= TYPE_SIZE_UNIT (elt_type
);
1505 if (integer_zerop (offset
))
1507 if (TREE_CODE (elt_size
) != INTEGER_CST
)
1508 elt_size
= size_int (TYPE_ALIGN (elt_type
));
1510 idx
= integer_zero_node
;
1514 unsigned HOST_WIDE_INT lquo
, lrem
;
1515 HOST_WIDE_INT hquo
, hrem
;
1517 if (TREE_CODE (elt_size
) != INTEGER_CST
1518 || div_and_round_double (TRUNC_DIV_EXPR
, 1,
1519 TREE_INT_CST_LOW (offset
),
1520 TREE_INT_CST_HIGH (offset
),
1521 TREE_INT_CST_LOW (elt_size
),
1522 TREE_INT_CST_HIGH (elt_size
),
1523 &lquo
, &hquo
, &lrem
, &hrem
)
1527 idx
= build_int_cst_wide (NULL_TREE
, lquo
, hquo
);
1530 /* Assume the low bound is zero. If there is a domain type, get the
1531 low bound, if any, convert the index into that type, and add the
1533 min_idx
= integer_zero_node
;
1534 if (TYPE_DOMAIN (array_type
))
1536 if (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type
)))
1537 min_idx
= TYPE_MIN_VALUE (TYPE_DOMAIN (array_type
));
1539 min_idx
= fold_convert (TYPE_DOMAIN (array_type
), min_idx
);
1541 if (TREE_CODE (min_idx
) != INTEGER_CST
)
1544 idx
= fold_convert (TYPE_DOMAIN (array_type
), idx
);
1545 elt_offset
= fold_convert (TYPE_DOMAIN (array_type
), elt_offset
);
1548 if (!integer_zerop (min_idx
))
1549 idx
= int_const_binop (PLUS_EXPR
, idx
, min_idx
, 0);
1550 if (!integer_zerop (elt_offset
))
1551 idx
= int_const_binop (PLUS_EXPR
, idx
, elt_offset
, 0);
1553 return build (ARRAY_REF
, orig_type
, base
, idx
, min_idx
,
1554 size_int (tree_low_cst (elt_size
, 1)
1555 / (TYPE_ALIGN_UNIT (elt_type
))));
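/* Editorial illustration (not part of the original pass): the index
   arithmetic behind the fold above, on a concrete example.  Given
   *(&a[2] + 12) with 4-byte elements, the byte offset 12 converts to
   12 / 4 = 3 extra elements, so the access folds to a[2 + 3], i.e.
   a[5].  A hedged sketch of the exact-division check, with a
   hypothetical helper name: */
#if 0
/* Return the element index for a byte OFFSET into an array whose
   elements are ELT_SIZE bytes wide, or -1 if the division is not
   exact (in which case the fold is not attempted).  */
static long
byte_offset_to_index (unsigned long offset, unsigned long elt_size)
{
  if (elt_size == 0 || offset % elt_size != 0)
    return -1;
  return (long) (offset / elt_size);
}
#endif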
1559 /* A subroutine of fold_stmt_r. Attempts to fold *(S+O) to S.X.
1560 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
1561 is the desired result type. */
1562 /* ??? This doesn't handle class inheritance. */
1565 maybe_fold_offset_to_component_ref (tree record_type
, tree base
, tree offset
,
1566 tree orig_type
, bool base_is_ptr
)
1568 tree f
, t
, field_type
, tail_array_field
, field_offset
;
1570 if (TREE_CODE (record_type
) != RECORD_TYPE
1571 && TREE_CODE (record_type
) != UNION_TYPE
1572 && TREE_CODE (record_type
) != QUAL_UNION_TYPE
)
1575 /* Short-circuit silly cases. */
1576 if (lang_hooks
.types_compatible_p (record_type
, orig_type
))
1579 tail_array_field
= NULL_TREE
;
1580 for (f
= TYPE_FIELDS (record_type
); f
; f
= TREE_CHAIN (f
))
1584 if (TREE_CODE (f
) != FIELD_DECL
)
1586 if (DECL_BIT_FIELD (f
))
1589 field_offset
= byte_position (f
);
1590 if (TREE_CODE (field_offset
) != INTEGER_CST
)
1593 /* ??? Java creates "interesting" fields for representing base classes.
1594 They have no name, and have no context. With no context, we get into
1595 trouble with nonoverlapping_component_refs_p. Skip them. */
1596 if (!DECL_FIELD_CONTEXT (f
))
1599 /* The previous array field isn't at the end. */
1600 tail_array_field
= NULL_TREE
;
1602 /* Check to see if this offset overlaps with the field. */
1603 cmp
= tree_int_cst_compare (field_offset
, offset
);
1607 field_type
= TREE_TYPE (f
);
1609 /* Here we exactly match the offset being checked. If the types match,
1610 then we can return that field. */
1612 && lang_hooks
.types_compatible_p (orig_type
, field_type
))
1615 base
= build1 (INDIRECT_REF
, record_type
, base
);
1616 t
= build (COMPONENT_REF
, field_type
, base
, f
, NULL_TREE
);
1620 /* Don't care about offsets into the middle of scalars. */
1621 if (!AGGREGATE_TYPE_P (field_type
))
1624 /* Check for array at the end of the struct. This is often
1625 used as for flexible array members. We should be able to
1626 turn this into an array access anyway. */
1627 if (TREE_CODE (field_type
) == ARRAY_TYPE
)
1628 tail_array_field
= f
;
1630 /* Check the end of the field against the offset. */
1631 if (!DECL_SIZE_UNIT (f
)
1632 || TREE_CODE (DECL_SIZE_UNIT (f
)) != INTEGER_CST
)
1634 t
= int_const_binop (MINUS_EXPR
, offset
, field_offset
, 1);
1635 if (!tree_int_cst_lt (t
, DECL_SIZE_UNIT (f
)))
1638 /* If we matched, then set offset to the displacement into
1644 if (!tail_array_field
)
1647 f
= tail_array_field
;
1648 field_type
= TREE_TYPE (f
);
1649 offset
= int_const_binop (MINUS_EXPR
, offset
, byte_position (f
), 1);
1652 /* If we get here, we've got an aggregate field, and a possibly
1653 nonzero offset into them. Recurse and hope for a valid match. */
1655 base
= build1 (INDIRECT_REF
, record_type
, base
);
1656 base
= build (COMPONENT_REF
, field_type
, base
, f
, NULL_TREE
);
1658 t
= maybe_fold_offset_to_array_ref (base
, offset
, orig_type
);
1661 return maybe_fold_offset_to_component_ref (field_type
, base
, offset
,
1666 /* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET).
1667 Return the simplified expression, or NULL if nothing could be done. */
1670 maybe_fold_stmt_indirect (tree expr
, tree base
, tree offset
)
1674 /* We may well have constructed a double-nested PLUS_EXPR via multiple
1675 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
1676 are sometimes added. */
1678 STRIP_TYPE_NOPS (base
);
1679 TREE_OPERAND (expr
, 0) = base
;
1681 /* One possibility is that the address reduces to a string constant. */
1682 t
= fold_read_from_constant_string (expr
);
1686 /* Add in any offset from a PLUS_EXPR. */
1687 if (TREE_CODE (base
) == PLUS_EXPR
)
1691 offset2
= TREE_OPERAND (base
, 1);
1692 if (TREE_CODE (offset2
) != INTEGER_CST
)
1694 base
= TREE_OPERAND (base
, 0);
1696 offset
= int_const_binop (PLUS_EXPR
, offset
, offset2
, 1);
1699 if (TREE_CODE (base
) == ADDR_EXPR
)
1701 /* Strip the ADDR_EXPR. */
1702 base
= TREE_OPERAND (base
, 0);
1704 /* Fold away CONST_DECL to its value, if the type is scalar. */
1705 if (TREE_CODE (base
) == CONST_DECL
1706 && is_gimple_min_invariant (DECL_INITIAL (base
)))
1707 return DECL_INITIAL (base
);
1709 /* Try folding *(&B+O) to B[X]. */
1710 t
= maybe_fold_offset_to_array_ref (base
, offset
, TREE_TYPE (expr
));
1714 /* Try folding *(&B+O) to B.X. */
1715 t
= maybe_fold_offset_to_component_ref (TREE_TYPE (base
), base
, offset
,
1716 TREE_TYPE (expr
), false);
1720 /* Fold *&B to B. We can only do this if EXPR is the same type
1721 as BASE. We can't do this if EXPR is the element type of an array
1722 and BASE is the array. */
1723 if (integer_zerop (offset
)
1724 && lang_hooks
.types_compatible_p (TREE_TYPE (base
),
1730 /* We can get here for out-of-range string constant accesses,
1731 such as "_"[3]. Bail out of the entire substitution search
1732 and arrange for the entire statement to be replaced by a
1733 call to __builtin_trap. In all likelihood this will all be
1734 constant-folded away, but in the meantime we can't leave with
1735 something that get_expr_operands can't understand. */
1739 if (TREE_CODE (t
) == ADDR_EXPR
1740 && TREE_CODE (TREE_OPERAND (t
, 0)) == STRING_CST
)
1742 /* FIXME: Except that this causes problems elsewhere with dead
1743 code not being deleted, and we die in the rtl expanders
1744 because we failed to remove some ssa_name. In the meantime,
1745 just return zero. */
1746 /* FIXME2: This condition should be signaled by
1747 fold_read_from_constant_string directly, rather than
1748 re-checking for it here. */
1749 return integer_zero_node
;
1752 /* Try folding *(B+O) to B->X. Still an improvement. */
1753 if (POINTER_TYPE_P (TREE_TYPE (base
)))
1755 t
= maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base
)),
1757 TREE_TYPE (expr
), true);
1763 /* Otherwise we had an offset that we could not simplify. */
1768 /* A subroutine of fold_stmt_r. EXPR is a PLUS_EXPR.
1770 A quaint feature extant in our address arithmetic is that there
1771 can be hidden type changes here. The type of the result need
1772 not be the same as the type of the input pointer.
1774 What we're after here is an expression of the form
1775 (T *)(&array + const)
1776 where the cast doesn't actually exist, but is implicit in the
1777 type of the PLUS_EXPR. We'd like to turn this into
1779 which may be able to propagate further. */
1782 maybe_fold_stmt_addition (tree expr
)
1784 tree op0
= TREE_OPERAND (expr
, 0);
1785 tree op1
= TREE_OPERAND (expr
, 1);
1786 tree ptr_type
= TREE_TYPE (expr
);
1789 bool subtract
= (TREE_CODE (expr
) == MINUS_EXPR
);
1791 /* We're only interested in pointer arithmetic. */
1792 if (!POINTER_TYPE_P (ptr_type
))
1794 /* Canonicalize the integral operand to op1. */
1795 if (INTEGRAL_TYPE_P (TREE_TYPE (op0
)))
1799 t
= op0
, op0
= op1
, op1
= t
;
1801 /* It had better be a constant. */
1802 if (TREE_CODE (op1
) != INTEGER_CST
)
1804 /* The first operand should be an ADDR_EXPR. */
1805 if (TREE_CODE (op0
) != ADDR_EXPR
)
1807 op0
= TREE_OPERAND (op0
, 0);
1809 /* If the first operand is an ARRAY_REF, expand it so that we can fold
1810 the offset into it. */
1811 while (TREE_CODE (op0
) == ARRAY_REF
)
1813 tree array_obj
= TREE_OPERAND (op0
, 0);
1814 tree array_idx
= TREE_OPERAND (op0
, 1);
1815 tree elt_type
= TREE_TYPE (op0
);
1816 tree elt_size
= TYPE_SIZE_UNIT (elt_type
);
1819 if (TREE_CODE (array_idx
) != INTEGER_CST
)
1821 if (TREE_CODE (elt_size
) != INTEGER_CST
)
1824 /* Un-bias the index by the min index of the array type. */
1825 min_idx
= TYPE_DOMAIN (TREE_TYPE (array_obj
));
1828 min_idx
= TYPE_MIN_VALUE (min_idx
);
1831 if (TREE_CODE (min_idx
) != INTEGER_CST
)
1834 array_idx
= convert (TREE_TYPE (min_idx
), array_idx
);
1835 if (!integer_zerop (min_idx
))
1836 array_idx
= int_const_binop (MINUS_EXPR
, array_idx
,
1841 /* Convert the index to a byte offset. */
1842 array_idx
= convert (sizetype
, array_idx
);
1843 array_idx
= int_const_binop (MULT_EXPR
, array_idx
, elt_size
, 0);
1845 /* Update the operands for the next round, or for folding. */
1846 /* If we're manipulating unsigned types, then folding into negative
1847 values can produce incorrect results. Particularly if the type
1848 is smaller than the width of the pointer. */
1850 && TYPE_UNSIGNED (TREE_TYPE (op1
))
1851 && tree_int_cst_lt (array_idx
, op1
))
1853 op1
= int_const_binop (subtract
? MINUS_EXPR
: PLUS_EXPR
,
1859 /* If we weren't able to fold the subtraction into another array reference,
1860 canonicalize the integer for passing to the array and component ref
1861 simplification functions. */
1864 if (TYPE_UNSIGNED (TREE_TYPE (op1
)))
1866 op1
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (op1
), op1
);
1867 /* ??? In theory fold should always produce another integer. */
1868 if (TREE_CODE (op1
) != INTEGER_CST
)
1872 ptd_type
= TREE_TYPE (ptr_type
);
1874 /* At which point we can try some of the same things as for indirects. */
1875 t
= maybe_fold_offset_to_array_ref (op0
, op1
, ptd_type
);
1877 t
= maybe_fold_offset_to_component_ref (TREE_TYPE (op0
), op0
, op1
,
1880 t
= build1 (ADDR_EXPR
, ptr_type
, t
);
1885 /* Subroutine of fold_stmt called via walk_tree. We perform several
1886 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
1889 fold_stmt_r (tree
*expr_p
, int *walk_subtrees
, void *data
)
1891 bool *changed_p
= data
;
1892 tree expr
= *expr_p
, t
;
1894 /* ??? It'd be nice if walk_tree had a pre-order option. */
1895 switch (TREE_CODE (expr
))
1898 t
= walk_tree (&TREE_OPERAND (expr
, 0), fold_stmt_r
, data
, NULL
);
1903 t
= maybe_fold_stmt_indirect (expr
, TREE_OPERAND (expr
, 0),
1907 /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF.
1908 We'd only want to bother decomposing an existing ARRAY_REF if
1909 the base array is found to have another offset contained within.
1910 Otherwise we'd be wasting time. */
1913 t
= walk_tree (&TREE_OPERAND (expr
, 0), fold_stmt_r
, data
, NULL
);
1918 /* Set TREE_INVARIANT properly so that the value is properly
1919 considered constant, and so gets propagated as expected. */
1921 recompute_tree_invarant_for_addr_expr (expr
);
1926 t
= walk_tree (&TREE_OPERAND (expr
, 0), fold_stmt_r
, data
, NULL
);
1929 t
= walk_tree (&TREE_OPERAND (expr
, 1), fold_stmt_r
, data
, NULL
);
1934 t
= maybe_fold_stmt_addition (expr
);
1938 t
= walk_tree (&TREE_OPERAND (expr
, 0), fold_stmt_r
, data
, NULL
);
1943 /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
1944 We've already checked that the records are compatible, so we should
1945 come up with a set of compatible fields. */
1947 tree expr_record
= TREE_TYPE (TREE_OPERAND (expr
, 0));
1948 tree expr_field
= TREE_OPERAND (expr
, 1);
1950 if (DECL_FIELD_CONTEXT (expr_field
) != TYPE_MAIN_VARIANT (expr_record
))
1952 expr_field
= find_compatible_field (expr_record
, expr_field
);
1953 TREE_OPERAND (expr
, 1) = expr_field
;
1958 case TARGET_MEM_REF
:
1959 t
= maybe_fold_tmr (expr
);
1976 /* Return the string length, maximum string length or maximum value of
1978 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1979 is not NULL and, for TYPE == 0, its value is not equal to the length
1980 we determine or if we are unable to determine the length or value,
1981 return false. VISITED is a bitmap of visited variables.
1982 TYPE is 0 if string length should be returned, 1 for maximum string
1983 length and 2 for maximum value ARG can have. */
1986 get_maxval_strlen (tree arg
, tree
*length
, bitmap visited
, int type
)
1988 tree var
, def_stmt
, val
;
1990 if (TREE_CODE (arg
) != SSA_NAME
)
1995 if (TREE_CODE (val
) != INTEGER_CST
1996 || tree_int_cst_sgn (val
) < 0)
2000 val
= c_strlen (arg
, 1);
2008 if (TREE_CODE (*length
) != INTEGER_CST
2009 || TREE_CODE (val
) != INTEGER_CST
)
2012 if (tree_int_cst_lt (*length
, val
))
2016 else if (simple_cst_equal (val
, *length
) != 1)
2024 /* If we were already here, break the infinite cycle. */
2025 if (bitmap_bit_p (visited
, SSA_NAME_VERSION (arg
)))
2027 bitmap_set_bit (visited
, SSA_NAME_VERSION (arg
));
2030 def_stmt
= SSA_NAME_DEF_STMT (var
);
2032 switch (TREE_CODE (def_stmt
))
2038 /* The RHS of the statement defining VAR must either have a
2039 constant length or come from another SSA_NAME with a constant
2041 rhs
= TREE_OPERAND (def_stmt
, 1);
2043 return get_maxval_strlen (rhs
, length
, visited
, type
);
2048 /* All the arguments of the PHI node must have the same constant
2052 for (i
= 0; i
< PHI_NUM_ARGS (def_stmt
); i
++)
2054 tree arg
= PHI_ARG_DEF (def_stmt
, i
);
2056 /* If this PHI has itself as an argument, we cannot
2057 determine the string length of this argument. However,
2058 if we can find a constant string length for the other
2059 PHI args then we can still be sure that this is a
2060 constant string length. So be optimistic and just
2061 continue with the next argument. */
2062 if (arg
== PHI_RESULT (def_stmt
))
2065 if (!get_maxval_strlen (arg
, length
, visited
, type
))
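/* Editorial illustration (not part of the original pass): the kind of
   situation get_maxval_strlen resolves.  Both PHI arguments below
   have the same known string length, so the strlen can be folded even
   though the pointer itself is not constant.  Names are
   hypothetical.  */
#if 0
const char *
pick (int i)
{
  /* "foo" and "bar" both have length 3, so the PHI for p has a
     constant string length and __builtin_strlen (p) folds to 3.  */
  const char *p = i ? "foo" : "bar";
  return p + __builtin_strlen (p);
}
#endif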
2081 /* Fold builtin call FN in statement STMT. If it cannot be folded into a
2082 constant, return NULL_TREE. Otherwise, return its constant value. */
2085 ccp_fold_builtin (tree stmt
, tree fn
)
2087 tree result
, val
[3];
2088 tree callee
, arglist
, a
;
2089 int arg_mask
, i
, type
;
2093 ignore
= TREE_CODE (stmt
) != MODIFY_EXPR
;
2095 /* First try the generic builtin folder. If that succeeds, return the
2097 callee
= get_callee_fndecl (fn
);
2098 arglist
= TREE_OPERAND (fn
, 1);
2099 result
= fold_builtin (callee
, arglist
, ignore
);
2103 STRIP_NOPS (result
);
2107 /* Ignore MD builtins. */
2108 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_MD
)
2111 /* If the builtin could not be folded, and it has no argument list,
2116 /* Limit the work only for builtins we know how to simplify. */
2117 switch (DECL_FUNCTION_CODE (callee
))
2119 case BUILT_IN_STRLEN
:
2120 case BUILT_IN_FPUTS
:
2121 case BUILT_IN_FPUTS_UNLOCKED
:
2125 case BUILT_IN_STRCPY
:
2126 case BUILT_IN_STRNCPY
:
2130 case BUILT_IN_MEMCPY_CHK
:
2131 case BUILT_IN_MEMPCPY_CHK
:
2132 case BUILT_IN_MEMMOVE_CHK
:
2133 case BUILT_IN_MEMSET_CHK
:
2134 case BUILT_IN_STRNCPY_CHK
:
2138 case BUILT_IN_STRCPY_CHK
:
2139 case BUILT_IN_STPCPY_CHK
:
2143 case BUILT_IN_SNPRINTF_CHK
:
2144 case BUILT_IN_VSNPRINTF_CHK
:
2152 /* Try to use the dataflow information gathered by the CCP process. */
2153 visited
= BITMAP_ALLOC (NULL
);
2155 memset (val
, 0, sizeof (val
));
2156 for (i
= 0, a
= arglist
;
2158 i
++, arg_mask
>>= 1, a
= TREE_CHAIN (a
))
2161 bitmap_clear (visited
);
2162 if (!get_maxval_strlen (TREE_VALUE (a
), &val
[i
], visited
, type
))
2166 BITMAP_FREE (visited
);
2169 switch (DECL_FUNCTION_CODE (callee
))
2171 case BUILT_IN_STRLEN
:
2174 tree
new = fold_convert (TREE_TYPE (fn
), val
[0]);
2176 /* If the result is not a valid gimple value, or not a cast
2177 of a valid gimple value, then we can not use the result. */
2178 if (is_gimple_val (new)
2179 || (is_gimple_cast (new)
2180 && is_gimple_val (TREE_OPERAND (new, 0))))
2185 case BUILT_IN_STRCPY
:
2186 if (val
[1] && is_gimple_val (val
[1]))
2187 result
= fold_builtin_strcpy (callee
, arglist
, val
[1]);
2190 case BUILT_IN_STRNCPY
:
2191 if (val
[1] && is_gimple_val (val
[1]))
2192 result
= fold_builtin_strncpy (callee
, arglist
, val
[1]);
2195 case BUILT_IN_FPUTS
:
2196 result
= fold_builtin_fputs (arglist
,
2197 TREE_CODE (stmt
) != MODIFY_EXPR
, 0,
2201 case BUILT_IN_FPUTS_UNLOCKED
:
2202 result
= fold_builtin_fputs (arglist
,
2203 TREE_CODE (stmt
) != MODIFY_EXPR
, 1,
2207 case BUILT_IN_MEMCPY_CHK
:
2208 case BUILT_IN_MEMPCPY_CHK
:
2209 case BUILT_IN_MEMMOVE_CHK
:
2210 case BUILT_IN_MEMSET_CHK
:
2211 if (val
[2] && is_gimple_val (val
[2]))
2212 result
= fold_builtin_memory_chk (callee
, arglist
, val
[2], ignore
,
2213 DECL_FUNCTION_CODE (callee
));
2216 case BUILT_IN_STRCPY_CHK
:
2217 case BUILT_IN_STPCPY_CHK
:
2218 if (val
[1] && is_gimple_val (val
[1]))
2219 result
= fold_builtin_stxcpy_chk (callee
, arglist
, val
[1], ignore
,
2220 DECL_FUNCTION_CODE (callee
));
2223 case BUILT_IN_STRNCPY_CHK
:
2224 if (val
[2] && is_gimple_val (val
[2]))
2225 result
= fold_builtin_strncpy_chk (arglist
, val
[2]);
2228 case BUILT_IN_SNPRINTF_CHK
:
2229 case BUILT_IN_VSNPRINTF_CHK
:
2230 if (val
[1] && is_gimple_val (val
[1]))
2231 result
= fold_builtin_snprintf_chk (arglist
, val
[1],
2232 DECL_FUNCTION_CODE (callee
));
2239 if (result
&& ignore
)
2240 result
= fold_ignored_result (result
);
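/* Editorial illustration (not part of the original pass): an example
   of the transformations driven by the string lengths collected
   above.  When the length of the source is known, a strcpy call may
   be rewritten as a fixed-size memcpy; that known length is what
   fold_builtin_strcpy receives as VAL[1].  */
#if 0
void
copy_greeting (char *dst)
{
  /* strlen ("hello") is known to be 5, so this call may be folded to
     __builtin_memcpy (dst, "hello", 6).  */
  __builtin_strcpy (dst, "hello");
}
#endif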
2245 /* Fold the statement pointed to by STMT_P. In some cases, this function may
2246 replace the whole statement with a new one. Returns true iff folding
2247 makes any changes. */
2250 fold_stmt (tree
*stmt_p
)
2252 tree rhs
, result
, stmt
;
2253 bool changed
= false;
2257 /* If we replaced constants and the statement makes pointer dereferences,
2258 then we may need to fold instances of *&VAR into VAR, etc. */
2259 if (walk_tree (stmt_p
, fold_stmt_r
, &changed
, NULL
))
2262 = build_function_call_expr (implicit_built_in_decls
[BUILT_IN_TRAP
],
2267 rhs
= get_rhs (stmt
);
2272 if (TREE_CODE (rhs
) == CALL_EXPR
)
2276 /* Check for builtins that CCP can handle using information not
2277 available in the generic fold routines. */
2278 callee
= get_callee_fndecl (rhs
);
2279 if (callee
&& DECL_BUILT_IN (callee
))
2280 result
= ccp_fold_builtin (stmt
, rhs
);
2283 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
2284 here are when we've propagated the address of a decl into the
2286 /* ??? Should perhaps do this in fold proper. However, doing it
2287 there requires that we create a new CALL_EXPR, and that requires
2288 copying EH region info to the new node. Easier to just do it
2289 here where we can just smash the call operand. */
2290 callee
= TREE_OPERAND (rhs
, 0);
2291 if (TREE_CODE (callee
) == OBJ_TYPE_REF
2292 && lang_hooks
.fold_obj_type_ref
2293 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee
)) == ADDR_EXPR
2294 && DECL_P (TREE_OPERAND
2295 (OBJ_TYPE_REF_OBJECT (callee
), 0)))
2299 /* ??? Caution: Broken ADDR_EXPR semantics means that
2300 looking at the type of the operand of the addr_expr
2301 can yield an array type. See silly exception in
2302 check_pointer_types_r. */
2304 t
= TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee
)));
2305 t
= lang_hooks
.fold_obj_type_ref (callee
, t
);
2308 TREE_OPERAND (rhs
, 0) = t
;
2315 /* If we couldn't fold the RHS, hand over to the generic fold routines. */
2316 if (result
== NULL_TREE
)
2317 result
= fold (rhs
);
2319 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR that
2320 may have been added by fold, and "useless" type conversions that might
2321 now be apparent due to propagation. */
2322 STRIP_USELESS_TYPE_CONVERSION (result
);
2325 changed
|= set_rhs (stmt_p
, result
);
2330 /* Perform the minimal folding on statement STMT. Only operations like
2331 *&x created by constant propagation are handled. The statement cannot
2332 be replaced with a new one. */
2335 fold_stmt_inplace (tree stmt
)
2337 tree old_stmt
= stmt
, rhs
, new_rhs
;
2338 bool changed
= false;
2340 walk_tree (&stmt
, fold_stmt_r
, &changed
, NULL
);
2341 gcc_assert (stmt
== old_stmt
);
2343 rhs
= get_rhs (stmt
);
2344 if (!rhs
|| rhs
== stmt
)
2347 new_rhs
= fold (rhs
);
2348 STRIP_USELESS_TYPE_CONVERSION (new_rhs
);
2352 changed
|= set_rhs (&stmt
, new_rhs
);
2353 gcc_assert (stmt
== old_stmt
);
2358 /* Convert EXPR into a GIMPLE value suitable for substitution on the
2359 RHS of an assignment. Insert the necessary statements before
2363 convert_to_gimple_builtin (block_stmt_iterator
*si_p
, tree expr
)
2365 tree_stmt_iterator ti
;
2366 tree stmt
= bsi_stmt (*si_p
);
2367 tree tmp
, stmts
= NULL
;
2369 push_gimplify_context ();
2370 tmp
= get_initialized_tmp_var (expr
, &stmts
, NULL
);
2371 pop_gimplify_context (NULL
);
2373 if (EXPR_HAS_LOCATION (stmt
))
2374 annotate_all_with_locus (&stmts
, EXPR_LOCATION (stmt
));
2376 /* The replacement can expose previously unreferenced variables. */
2377 for (ti
= tsi_start (stmts
); !tsi_end_p (ti
); tsi_next (&ti
))
2379 tree new_stmt
= tsi_stmt (ti
);
2380 find_new_referenced_vars (tsi_stmt_ptr (ti
));
2381 bsi_insert_before (si_p
, new_stmt
, BSI_NEW_STMT
);
2382 mark_new_vars_to_rename (bsi_stmt (*si_p
));
2390 /* A simple pass that attempts to fold all builtin functions. This pass
2391 is run after we've propagated as many constants as we can. */
2394 execute_fold_all_builtins (void)
2396 bool cfg_changed
= false;
2400 block_stmt_iterator i
;
2401 for (i
= bsi_start (bb
); !bsi_end_p (i
); )
2403 tree
*stmtp
= bsi_stmt_ptr (i
);
2404 tree old_stmt
= *stmtp
;
2405 tree call
= get_rhs (*stmtp
);
2406 tree callee
, result
;
2407 enum built_in_function fcode
;
2409 if (!call
|| TREE_CODE (call
) != CALL_EXPR
)
2414 callee
= get_callee_fndecl (call
);
2415 if (!callee
|| DECL_BUILT_IN_CLASS (callee
) != BUILT_IN_NORMAL
)
2420 fcode
= DECL_FUNCTION_CODE (callee
);
2422 result
= ccp_fold_builtin (*stmtp
, call
);
2424 switch (DECL_FUNCTION_CODE (callee
))
2426 case BUILT_IN_CONSTANT_P
:
2427 /* Resolve __builtin_constant_p. If it hasn't been
2428 folded to integer_one_node by now, it's fairly
2429 certain that the value simply isn't constant. */
2430 result
= integer_zero_node
;
2438 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2440 fprintf (dump_file
, "Simplified\n ");
2441 print_generic_stmt (dump_file
, *stmtp
, dump_flags
);
2444 if (!set_rhs (stmtp
, result
))
2446 result
= convert_to_gimple_builtin (&i
, result
);
2449 bool ok
= set_rhs (stmtp
, result
);
2454 update_stmt (*stmtp
);
2455 if (maybe_clean_or_replace_eh_stmt (old_stmt
, *stmtp
)
2456 && tree_purge_dead_eh_edges (bb
))
2459 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2461 fprintf (dump_file
, "to\n ");
2462 print_generic_stmt (dump_file
, *stmtp
, dump_flags
);
2463 fprintf (dump_file
, "\n");
2466 /* Retry the same statement if it changed into another
2467 builtin, there might be new opportunities now. */
2468 call
= get_rhs (*stmtp
);
2469 if (!call
|| TREE_CODE (call
) != CALL_EXPR
)
2474 callee
= get_callee_fndecl (call
);
2476 || DECL_BUILT_IN_CLASS (callee
) != BUILT_IN_NORMAL
2477 || DECL_FUNCTION_CODE (callee
) == fcode
)
2482 /* Delete unreachable blocks. */
2484 cleanup_tree_cfg ();
2488 struct tree_opt_pass pass_fold_builtins
=
2492 execute_fold_all_builtins
, /* execute */
2495 0, /* static_pass_number */
2497 PROP_cfg
| PROP_ssa
| PROP_alias
, /* properties_required */
2498 0, /* properties_provided */
2499 0, /* properties_destroyed */
2500 0, /* todo_flags_start */
2503 | TODO_update_ssa
, /* todo_flags_finish */