/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.
   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.
   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:
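
		if (PRED)
		  a_9 = 3;
		else
		  a_10 = 100;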
		a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.
      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider a code fragment like the following:
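
	struct A a;
	const int B = 42;

	void foo (int i)
	{
	  if (i > 10)
	    a.a = 42;
	  else
	    {
	      a.b = 21;
	      a.a = a.b + 21;
	    }

	  if (a.a != B)
	    never_executed ();
	}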
   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,
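
	# a_5 = VDEF <a_4>
	a.a = 2;

	# VUSE <a_5>
	x_3 = a.b;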
   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.
   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:
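
	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;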
	  # A_5 = PHI (A_4, A_2);
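	  # VUSE <A_5>
	  return A;
	}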
   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.
   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"

/* Possible lattice values.  */
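typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;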

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;
static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}

/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */
static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym) && TREE_CODE (sym) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	val.lattice_val = VARYING;
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
    }

  return val;
}

/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

   x = 0.0 * (argument ? y : NaN)

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */
static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
    }
}

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, then we do not
	     want to avoid simulating the statement once.  Failure
	     to do so means that those edges will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Debug count support.  Reset the values of SSA names to VARYING
   when the total number of SSA names analyzed is beyond the debug
   count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].value = NULL_TREE;
	}
    }
}

/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;

  do_dbg_cnt ();
  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (const_val, ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}

/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */
static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any.  */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any.
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.
	 Nothing to do.  VAL1 already contains the value we want.  */
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}

/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */
static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
	      "\n    Argument #%d (%d -> %d %sexecutable)\n",
	      i, e->src->index, e->dest->index,
	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Get operand number OPNR from the rhs of STMT.  Before returning it,
   simplify it to a constant if possible.  */

static tree
get_rhs_assign_op_for_ccp (gimple stmt, int opnr)
{
  tree op = gimple_op (stmt, opnr);

  if (TREE_CODE (op) == SSA_NAME)
    {
      prop_value_t *val = get_value (op);
      if (val->lattice_val == CONSTANT)
	op = get_value (op)->value;
    }
  return op;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */
static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return get_value (rhs)->value;
		}
	      /* Handle propagating invariant addresses into address operations.
		 The folding we do here matches that in tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == MEM_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR)
			{
			  tree ret, save = *base;
			  tree new_base;
			  new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
						  unshare_expr (val->value),
						  TREE_OPERAND (*base, 1));
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  *base = new_base;
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, list;

		  list = NULL_TREE;
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      if (TREE_CODE (val) == SSA_NAME
			  && get_value (val)->lattice_val == CONSTANT)
			val = get_value (val)->value;
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			list = tree_cons (NULL_TREE, val, list);
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), nreverse (list));
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT)
			return fold_unary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val->value);
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR)
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val->value),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref (rhs);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = get_rhs_assign_op_for_ccp (stmt, 1);

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0)))
		{
		  tree tem;
		  /* Try to re-construct array references on-the-fly.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
				   (loc,
				    op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = get_rhs_assign_op_for_ccp (stmt, 1);
	      tree op1 = get_rhs_assign_op_for_ccp (stmt, 2);

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr
			   (fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = get_rhs_assign_op_for_ccp (stmt, 1);
	      tree op1 = get_rhs_assign_op_for_ccp (stmt, 2);
	      tree op2 = get_rhs_assign_op_for_ccp (stmt, 3);

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    case GIMPLE_CALL:
      {
	tree fn = gimple_call_fn (stmt);
	prop_value_t *val;

	if (TREE_CODE (fn) == SSA_NAME)
	  {
	    val = get_value (fn);
	    if (val->lattice_val == CONSTANT)
	      fn = val->value;
	  }
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      {
		args[i] = gimple_call_arg (stmt, i);
		if (TREE_CODE (args[i]) == SSA_NAME)
		  {
		    val = get_value (args[i]);
		    if (val->lattice_val == CONSTANT)
		      args[i] = val->value;
		  }
	      }
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = gimple_cond_lhs (stmt);
	tree op1 = gimple_cond_rhs (stmt);
	enum tree_code code = gimple_cond_code (stmt);

	/* Simplify the operands down to constants when appropriate.  */
	if (TREE_CODE (op0) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op0);
	    if (val->lattice_val == CONSTANT)
	      op0 = val->value;
	  }

	if (TREE_CODE (op1) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op1);
	    if (val->lattice_val == CONSTANT)
	      op1 = val->value;
	  }

	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	tree rhs = gimple_switch_index (stmt);

	if (TREE_CODE (rhs) == SSA_NAME)
	  {
	    /* If the RHS is an SSA_NAME, return its known constant value,
	       if any.  */
	    return get_value (rhs)->value;
	  }

	return rhs;
      }

    default:
      gcc_unreachable ();
    }
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case MEM_REF:
	  /* ??? We could handle this case.  */
	  if (!integer_zerop (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  base = get_base_address (base);
	  if (!base
	      || TREE_CODE (base) != VAR_DECL)
	    return NULL_TREE;

	  /* Fallthru.  */
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  {
	    STRIP_NOPS (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  {
	    STRIP_NOPS (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case MEM_REF:
      /* Get the base object we are accessing.  */
      base = TREE_OPERAND (t, 0);
      if (TREE_CODE (base) == SSA_NAME
	  && (value = get_value (base))
	  && value->lattice_val == CONSTANT)
	base = value->value;
      if (TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (DECL_P (base)
	      && !AGGREGATE_TYPE_P (TREE_TYPE (base))
	      && integer_zerop (TREE_OPERAND (t, 1)))
	    {
	      tree res = get_symbol_constant_value (base);
	      if (res
		  && !useless_type_conversion_p
			(TREE_TYPE (t), TREE_TYPE (res)))
		res = fold_unary (VIEW_CONVERT_EXPR, TREE_TYPE (t), res);
	      return res;
	    }

	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the byte offset.  */
      idx = TREE_OPERAND (t, 1);

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* ??? Implement byte-offset indexing into a non-array CONSTRUCTOR.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  && (TYPE_MODE (TREE_TYPE (t))
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	  && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t))) != 0
	  && integer_zerop
	       (int_const_binop
		  (TRUNC_MOD_EXPR, idx,
		   size_int (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t)))), 0)))
	{
	  idx = int_const_binop (TRUNC_DIV_EXPR, idx,
				 size_int (GET_MODE_SIZE
					     (TYPE_MODE (TREE_TYPE (t)))), 0);
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	    if (tree_int_cst_equal (cfield, idx))
	      {
		STRIP_NOPS (cval);
		if (TREE_CODE (cval) == ADDR_EXPR)
		  {
		    tree base = get_base_address (TREE_OPERAND (cval, 0));
		    if (base && TREE_CODE (base) == VAR_DECL)
		      add_referenced_var (base);
		  }
		if (useless_type_conversion_p (TREE_TYPE (t), TREE_TYPE (cval)))
		  return cval;
		else if (CONSTANT_CLASS_P (cval))
		  return fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (t), cval);
		else
		  return NULL_TREE;
	      }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}

/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || TREE_CODE (val.value) != INTEGER_CST)
	  return false;

	if (integer_zerop (val.value))
	  gimple_cond_make_false (stmt);
	else
	  gimple_cond_make_true (stmt);

	return true;
      }

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	prop_value_t *val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_value (lhs))
	    && val->lattice_val == CONSTANT)
	  {
	    tree new_rhs = unshare_expr (val->value);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_value (arg))
		&& val->lattice_val == CONSTANT
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val->value))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val->value));
		changed = true;
	      }
	  }

	return changed;
      }

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	prop_value_t *val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_value (lhs))
	    && val->lattice_val == CONSTANT)
	  {
	    tree rhs = unshare_expr (val->value);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  /* For a simple copy operation, we copy the lattice values.  */
	  prop_value_t *nval = get_value (rhs);
	  val = *nval;
	}
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
      case GIMPLE_ASSIGN:
	/* If the statement is an assignment that produces a single
	   output value, evaluate its RHS to see if the lattice value of
	   its output has changed.  */
	return visit_assignment (stmt, output_p);

      case GIMPLE_CALL:
	/* A value-returning call also performs an assignment.  */
	if (gimple_call_lhs (stmt) != NULL_TREE)
	  return visit_assignment (stmt, output_p);
	break;

      case GIMPLE_COND:
      case GIMPLE_SWITCH:
	/* If STMT is a conditional branch, see if we can determine
	   which branch will be taken.  */
	/* FIXME.  It appears that we should be able to optimize
	   computed GOTOs here as well.  */
	return visit_cond_stmt (stmt, taken_edge_p);

      default:
	break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}

struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
 }
};

/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }
 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}

/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
				 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = gimple_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_update_address_taken;
	    }

	  stmt = gsi_stmt (i);
	  update_stmt (stmt);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};