/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

        UNINITIALIZED   -> the initial state of the value.  This value
                           is replaced with a correct initial value
                           the first time the value is used, so the
                           rest of the pass does not need to care about
                           it.  Using this value simplifies initialization
                           of the pass, and prevents us from needlessly
                           scanning statements that are never reached.

        UNDEFINED       -> V_i is a local variable whose definition
                           has not been processed yet.  Therefore we
                           don't yet know if its value is a constant
                           or not.

        CONSTANT        -> V_i has been found to hold a constant
                           value C.

        VARYING         -> V_i cannot take a constant value, or if it
                           does, it is not possible to determine it
                           at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

                        if (PRED)
                          a_9 = 3;
                        else
                          a_10 = 100;
                        a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

          struct A a;
          const int B = 42;

          void foo (int i)
          {
            if (i > 10)
              a.a = 42;
            else
              {
                a.b = 21;
                a.a = a.b + 21;
              }

            if (a.a != B)
              never_executed ();
          }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

          # a_5 = VDEF <a_4>
          a.a = 2;

          # VUSE <a_5>
          x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

        int A;

        foo (int i)
        {
          if (i_3 > 10)
            A_4 = 3;
          # A_5 = PHI (A_4, A_2);

          # VUSE <A_5>
          A.0_6 = A;

          return A.0_6;
        }

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
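
/* For illustration only (a hypothetical input, not part of the pass):
   given a function like

        int f (void)
        {
          int x = 4;
          int y = x + 5;
          if (y > 10)
            return 0;
          return y;
        }

   CCP determines that y_2 is CONSTANT 9, folds the predicate
   'y_2 > 10' to false, marks the edge into 'return 0' as
   non-executable, and the final substitution rewrites the tail
   into 'return 9;'.  */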
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "dbgcnt.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   that produced the VDEF).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}
/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */
static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
         before being initialized.  If VAR is a local variable, we
         can assume initially that it is UNDEFINED, otherwise we must
         consider it VARYING.  */
      if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
        val.lattice_val = UNDEFINED;
      else
        val.lattice_val = VARYING;
    }
  else if (is_gimple_assign (stmt)
           /* Value-returning GIMPLE_CALL statements assign to
              a variable, and are treated similarly to GIMPLE_ASSIGN.  */
           || (is_gimple_call (stmt)
               && gimple_call_lhs (stmt) != NULL_TREE)
           || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
          && DECL_P (gimple_assign_rhs1 (stmt))
          && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
        {
          val.lattice_val = CONSTANT;
          val.value = cst;
        }
      else
        /* Any other variable defined by an assignment or a PHI node
           is considered UNDEFINED.  */
        val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
    }

  return val;
}
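
/* For illustration (hypothetical SSA names): with the rules above, the
   default for a use of an uninitialized local 'int x' is UNDEFINED;
   the default for an SSA name of a PARM_DECL, or of any symbol that is
   not a GIMPLE register, is VARYING; and an assignment

     y_1 = some_readonly_decl;

   defaults to CONSTANT when get_symbol_constant_value can extract the
   declaration's initializer.  */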
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

   x = 0.0 * (argument ? y : NaN)

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */
static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
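
/* A minimal standalone sketch of the same idea over plain 'double',
   for illustration only (hypothetical helper; the <math.h> predicates
   stand in for REAL_VALUE_MINUS_ZERO and REAL_VALUE_ISNAN, the two int
   flags for HONOR_SIGNED_ZEROS and HONOR_NANS):

     #include <math.h>

     static double
     canonicalize_double (double d, int honor_signed_zeros, int honor_nans)
     {
       // -0.0 canonicalizes to +0.0 when signed zeros are not honored.
       if (!honor_signed_zeros && d == 0.0 && signbit (d))
         return 0.0;
       // The pass drops a NaN to UNDEFINED; a plain double has no
       // lattice, so this sketch simply returns it unchanged.
       if (!honor_nans && isnan (d))
         return d;
       return d;
     }
*/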
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
              || (old_val->lattice_val == new_val.lattice_val
                  && ((!old_val->value && !new_val.value)
                      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
          fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
        }

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}
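
/* Illustration of the transitions the assertion above permits, given
   the enum order UNINITIALIZED < UNDEFINED < CONSTANT < VARYING:

     UNDEFINED   -> CONSTANT 5     OK (strictly increasing)
     CONSTANT 5  -> CONSTANT 5     OK (non-transition; returns false)
     CONSTANT 5  -> VARYING        OK
     CONSTANT 5  -> CONSTANT 7     assertion failure (values differ)
     VARYING     -> CONSTANT 5     assertion failure (decreasing)  */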
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        has_undefined_operand = true;
      else
        all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
        has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
        continue;
      if (is_gimple_min_invariant (op))
        has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
        {
        /* Unary operators are handled with all_undefined_operands.  */
        case PLUS_EXPR:
        case MINUS_EXPR:
        case POINTER_PLUS_EXPR:
          /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
             Not bitwise operators, one VARYING operand may specify the
             result completely.  Not logical operators for the same reason.
             Not COMPLEX_EXPR as one VARYING operand makes the result partly
             not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
             the undefined operand may be promoted.  */
          return UNDEFINED;

        default:
          ;
        }
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
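
/* Examples of the classification above (hypothetical statements):

     z_3 = x_1 + y_2;    x_1, y_2 CONSTANT            -> CONSTANT
     z_3 = x_1 + 4;      x_1 UNDEFINED                -> UNDEFINED
     z_3 = x_1 / y_2;    x_1 UNDEFINED, y_2 CONSTANT  -> VARYING

   The last case falls back to VARYING because division is not in the
   PLUS/MINUS/POINTER_PLUS list above: the undefined operand may be
   promoted, so the result cannot be assumed UNDEFINED.  */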
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
          || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
              && !DECL_BUILT_IN (fndecl)))
        return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          bool is_varying;

          /* If the statement is a control insn, then we do not
             want to avoid simulating the statement once.  Failure
             to do so means that those edges will never get added.  */
          if (stmt_ends_bb_p (stmt))
            is_varying = false;
          else
            is_varying = surely_varying_stmt_p (stmt);

          if (is_varying)
            {
              tree def;
              ssa_op_iter iter;

              /* If the statement will not produce a constant, mark
                 all its outputs VARYING.  */
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
                set_value_varying (def);
            }
          prop_set_simulate_again (stmt, !is_varying);
        }
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          if (!is_gimple_reg (gimple_phi_result (phi)))
            prop_set_simulate_again (phi, false);
          else
            prop_set_simulate_again (phi, true);
        }
    }
}
/* Debug count support.  Reset the values of ssa names
   VARYING when the total number of ssa names analyzed is
   beyond the debug count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
        {
          const_val[i].lattice_val = VARYING;
          const_val[i].value = NULL_TREE;
        }
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;

  do_dbg_cnt ();
  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (const_val, ccp_fold_stmt);

  free (const_val);
  const_val = NULL;
  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

                any  M UNDEFINED   = any
                any  M VARYING     = VARYING
                Ci   M Cj          = Ci         if (i == j)
                Ci   M Cj          = VARYING    if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci           if (i == j)
         Ci M Cj = VARYING      if (i != j)

         If these two values come from memory stores, make sure that
         they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      val1->value = val1->value;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}
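
/* Worked examples of the meet rules above (values abbreviated):

     { CONSTANT, 9 }   M  { UNDEFINED, - }  =  { CONSTANT, 9 }
     { CONSTANT, 9 }   M  { CONSTANT, 9 }   =  { CONSTANT, 9 }
     { CONSTANT, 9 }   M  { CONSTANT, 3 }   =  { VARYING, - }
     { UNDEFINED, - }  M  { VARYING, - }    =  { VARYING, - }  */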
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of
   a PHI node is determined by calling ccp_lattice_meet with all the
   arguments of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
         through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n    Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree arg = gimple_phi_arg (phi, i)->def;
          prop_value_t arg_val;

          if (is_gimple_min_invariant (arg))
            {
              arg_val.lattice_val = CONSTANT;
              arg_val.value = arg;
            }
          else
            arg_val = *(get_value (arg));

          ccp_lattice_meet (&new_val, &arg_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, arg, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", arg_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */
static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant
                     value, if any.  */
                  return get_value (rhs)->value;
                }
              /* Handle propagating invariant addresses into address
                 operations.  The folding we do here matches that in
                 tree-ssa-forwprop.c.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR)
                {
                  tree *base;
                  base = &TREE_OPERAND (rhs, 0);
                  while (handled_component_p (*base))
                    base = &TREE_OPERAND (*base, 0);
                  if (TREE_CODE (*base) == INDIRECT_REF
                      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
                      if (val->lattice_val == CONSTANT
                          && TREE_CODE (val->value) == ADDR_EXPR
                          && may_propagate_address_into_dereference
                               (val->value, *base))
                        {
                          /* We need to return a new tree, not modify the IL
                             or share parts of it.  So play some tricks to
                             avoid manually building it.  */
                          tree ret, save = *base;
                          *base = TREE_OPERAND (val->value, 0);
                          ret = unshare_expr (rhs);
                          recompute_tree_invariant_for_addr_expr (ret);
                          *base = save;
                          return ret;
                        }
                    }
                }
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, list;

                  list = NULL_TREE;
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      if (TREE_CODE (val) == SSA_NAME
                          && get_value (val)->lattice_val == CONSTANT)
                        val = get_value (val)->value;
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        list = tree_cons (NULL_TREE, val, list);
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), nreverse (list));
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
                      if (val->lattice_val == CONSTANT)
                        return fold_unary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val->value);
                    }
                  else if (TREE_CODE (rhs) == INDIRECT_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
                      if (val->lattice_val == CONSTANT
                          && TREE_CODE (val->value) == ADDR_EXPR
                          && useless_type_conversion_p (TREE_TYPE (rhs),
                                 TREE_TYPE (TREE_TYPE (val->value))))
                        rhs = TREE_OPERAND (val->value, 0);
                    }
                  return fold_const_aggregate_ref (rhs);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = gimple_assign_rhs1 (stmt);

              /* Simplify the operand down to a constant.  */
              if (TREE_CODE (op0) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op0);
                  if (val->lattice_val == CONSTANT)
                    op0 = get_value (op0)->value;
                }

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for pointer type conversions
                 do not apply here.  Substitution later will only substitute to
                 allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0))
                  /* Do not allow differences in volatile qualification
                     as this might get us confused as to whether a
                     propagation destination statement is volatile
                     or not.  See PR36988.  */
                  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
                      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
                {
                  tree tem;
                  /* Still try to generate a constant of correct type.  */
                  if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                  TREE_TYPE (op0))
                      && ((tem = maybe_fold_offset_to_address
                           (loc,
                            op0, integer_zero_node, TREE_TYPE (lhs)))
                          != NULL_TREE))
                    return tem;
                  return op0;
                }

              return
                fold_unary_ignore_overflow_loc (loc, subcode,
                                                gimple_expr_type (stmt), op0);
            }

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = gimple_assign_rhs1 (stmt);
              tree op1 = gimple_assign_rhs2 (stmt);

              /* Simplify the operands down to constants when appropriate.  */
              if (TREE_CODE (op0) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op0);
                  if (val->lattice_val == CONSTANT)
                    op0 = val->value;
                }

              if (TREE_CODE (op1) == SSA_NAME)
                {
                  prop_value_t *val = get_value (op1);
                  if (val->lattice_val == CONSTANT)
                    op1 = val->value;
                }

              /* Fold &foo + CST into an invariant reference if possible.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree tem = maybe_fold_offset_to_address
                    (loc, op0, op1, TREE_TYPE (op0));
                  if (tem != NULL_TREE)
                    return tem;
                }

              return fold_binary_loc (loc, subcode,
                                      gimple_expr_type (stmt), op0, op1);
            }

          default:
            gcc_unreachable ();
          }
      }
      break;

    case GIMPLE_CALL:
      {
        tree fn = gimple_call_fn (stmt);
        prop_value_t *val;

        if (TREE_CODE (fn) == SSA_NAME)
          {
            val = get_value (fn);
            if (val->lattice_val == CONSTANT)
              fn = val->value;
          }
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              {
                args[i] = gimple_call_arg (stmt, i);
                if (TREE_CODE (args[i]) == SSA_NAME)
                  {
                    val = get_value (args[i]);
                    if (val->lattice_val == CONSTANT)
                      args[i] = val->value;
                  }
              }
            call = build_call_array_loc (loc,
                                         gimple_call_return_type (stmt),
                                         fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (EXPR_LOCATION (call), call, false);
            if (retval)
              /* fold_call_expr wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (retval);
            return retval;
          }
        return NULL_TREE;
      }

    case GIMPLE_COND:
      {
        /* Handle comparison operators that can appear in GIMPLE form.  */
        tree op0 = gimple_cond_lhs (stmt);
        tree op1 = gimple_cond_rhs (stmt);
        enum tree_code code = gimple_cond_code (stmt);

        /* Simplify the operands down to constants when appropriate.  */
        if (TREE_CODE (op0) == SSA_NAME)
          {
            prop_value_t *val = get_value (op0);
            if (val->lattice_val == CONSTANT)
              op0 = val->value;
          }

        if (TREE_CODE (op1) == SSA_NAME)
          {
            prop_value_t *val = get_value (op1);
            if (val->lattice_val == CONSTANT)
              op1 = val->value;
          }

        return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
        tree rhs = gimple_switch_index (stmt);

        if (TREE_CODE (rhs) == SSA_NAME)
          {
            /* If the RHS is an SSA_NAME, return its known constant value,
               if any.  */
            return get_value (rhs)->value;
          }

        return rhs;
      }

    default:
      gcc_unreachable ();
    }
}
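
/* For instance (illustrative): with 'static int a[4];' (assuming a
   4-byte int) and p_1 known to be CONSTANT '&a', the GIMPLE_BINARY_RHS
   case can fold

     q_2 = p_1 + 4;    (POINTER_PLUS_EXPR; the offset is in bytes)

   into the invariant '&a[1]' via maybe_fold_offset_to_address, and a
   GIMPLE_COND such as 'if (x_3 == 5)' with x_3 CONSTANT 5 folds to
   boolean true through fold_binary_loc.  */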
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        case STRING_CST:
        case CONSTRUCTOR:
          ctor = base;
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || (TREE_CODE (ctor) != CONSTRUCTOR
              && TREE_CODE (ctor) != STRING_CST)
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
         with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
        {
        case SSA_NAME:
          if ((value = get_value (idx))
              && value->lattice_val == CONSTANT
              && TREE_CODE (value->value) == INTEGER_CST)
            idx = value->value;
          else
            return NULL_TREE;
          break;

        case INTEGER_CST:
          break;

        default:
          return NULL_TREE;
        }

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
        {
          if ((TYPE_MODE (TREE_TYPE (t))
               == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
              && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
                  == MODE_INT)
              && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
              && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
            return build_int_cst_type (TREE_TYPE (t),
                                       (TREE_STRING_POINTER (ctor)
                                        [TREE_INT_CST_LOW (idx)]));
          return NULL_TREE;
        }

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (tree_int_cst_equal (cfield, idx))
          {
            STRIP_NOPS (cval);
            if (TREE_CODE (cval) == ADDR_EXPR)
              {
                tree base = get_base_address (TREE_OPERAND (cval, 0));
                if (base && TREE_CODE (base) == VAR_DECL)
                  add_referenced_var (base);
              }
            return cval;
          }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || TREE_CODE (ctor) != CONSTRUCTOR
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (cfield == field
            /* FIXME: Handle bit-fields.  */
            && ! DECL_BIT_FIELD (cfield))
          {
            STRIP_NOPS (cval);
            if (TREE_CODE (cval) == ADDR_EXPR)
              {
                tree base = get_base_address (TREE_OPERAND (cval, 0));
                if (base && TREE_CODE (base) == VAR_DECL)
                  add_referenced_var (base);
              }
            return cval;
          }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    case INDIRECT_REF:
      {
        tree base = TREE_OPERAND (t, 0);
        if (TREE_CODE (base) == SSA_NAME
            && (value = get_value (base))
            && value->lattice_val == CONSTANT
            && TREE_CODE (value->value) == ADDR_EXPR
            && useless_type_conversion_p (TREE_TYPE (t),
                                          TREE_TYPE (TREE_TYPE (value->value))))
          return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
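
/* Illustration: given

     static const int tbl[3] = { 10, 20, 30 };

   a load 'tbl[2]' (or 'tbl[i_1]' where i_1 has lattice value CONSTANT 2)
   folds to 30 through the ARRAY_REF case above, and a read from a
   string literal such as '"abc"[1]' folds to the character constant
   'b' through the STRING_CST path.  */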
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);

          /* Other cases cannot satisfy is_gimple_min_invariant
             without folding.  */
          if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
            simplified = gimple_assign_rhs1 (stmt);
        }
      else if (code == GIMPLE_SWITCH)
        simplified = gimple_switch_index (stmt);
      else
        /* These cannot satisfy is_gimple_min_invariant without folding.  */
        gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
        {
        case CONSTANT:
          fprintf (dump_file, "CONSTANT");
          break;
        case UNDEFINED:
          fprintf (dump_file, "UNDEFINED");
          break;
        case VARYING:
          fprintf (dump_file, "VARYING");
          break;
        default:;
        }
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
         had UNDEFINED operands, then the result of the statement
         should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
        val.lattice_val = likelyvalue;
      else
        val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}
/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        prop_value_t val;
        /* Statement evaluation will handle type mismatches in constants
           more gracefully than the final propagation.  This allows us to
           fold more conditionals here.  */
        val = evaluate_stmt (stmt);
        if (val.lattice_val != CONSTANT
            || TREE_CODE (val.value) != INTEGER_CST)
          return false;

        if (integer_zerop (val.value))
          gimple_cond_make_false (stmt);
        else
          gimple_cond_make_true (stmt);

        return true;
      }

    case GIMPLE_CALL:
      {
        tree lhs = gimple_call_lhs (stmt);
        prop_value_t *val;
        tree argt;
        bool changed = false;
        unsigned i;

        /* If the call was folded into a constant make sure it goes
           away even if we cannot propagate into all uses because of
           type issues.  */
        if (lhs
            && TREE_CODE (lhs) == SSA_NAME
            && (val = get_value (lhs))
            && val->lattice_val == CONSTANT)
          {
            tree new_rhs = unshare_expr (val->value);
            bool res;
            if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                            TREE_TYPE (new_rhs)))
              new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
            res = update_call_from_tree (gsi, new_rhs);
            gcc_assert (res);
            return true;
          }

        /* Propagate into the call arguments.  Compared to replace_uses_in
           this can use the argument slot types for type verification
           instead of the current argument type.  We also can safely
           drop qualifiers here as we are dealing with constants anyway.  */
        argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
        for (i = 0; i < gimple_call_num_args (stmt) && argt;
             ++i, argt = TREE_CHAIN (argt))
          {
            tree arg = gimple_call_arg (stmt, i);
            if (TREE_CODE (arg) == SSA_NAME
                && (val = get_value (arg))
                && val->lattice_val == CONSTANT
                && useless_type_conversion_p
                     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
                      TYPE_MAIN_VARIANT (TREE_TYPE (val->value))))
              {
                gimple_call_set_arg (stmt, i, unshare_expr (val->value));
                changed = true;
              }
          }

        return changed;
      }

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);
        prop_value_t *val;

        /* If we have a load that turned out to be constant replace it
           as we cannot propagate into all uses in all cases.  */
        if (gimple_assign_single_p (stmt)
            && TREE_CODE (lhs) == SSA_NAME
            && (val = get_value (lhs))
            && val->lattice_val == CONSTANT)
          {
            tree rhs = unshare_expr (val->value);
            if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
              rhs = fold_convert (TREE_TYPE (lhs), rhs);
            gimple_assign_set_rhs_from_tree (gsi, rhs);
            return true;
          }

        return false;
      }

    default:
      return false;
    }
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
              || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
        {
          /* For a simple copy operation, we copy the lattice values.  */
          prop_value_t *nval = get_value (rhs);
          val = *nval;
        }
      else
        val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
         value to set.  */
      if (set_lattice_value (lhs, val))
        {
          *output_p = lhs;
          if (val.lattice_val == VARYING)
            retval = SSA_PROP_VARYING;
          else
            retval = SSA_PROP_INTERESTING;
        }
    }

  return retval;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
         output value, evaluate its RHS to see if the lattice value of
         its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
        return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
         which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
         computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}
struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
 }
};
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
          || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
          /* All regular builtins are ok, just obviously not alloca.  */
          || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
        return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }
 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
        {
          callee = gimple_call_fndecl (stack_save);
          if (callee
              && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
            {
              gimple_stmt_iterator stack_save_gsi;
              tree rhs;

              stack_save_gsi = gsi_for_stmt (stack_save);
              rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
              update_call_from_tree (&stack_save_gsi, rhs);
            }
        }
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
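
/* Illustration (hypothetical GIMPLE): after inlining a callee that used
   a variable-length array, a block may end up containing

     saved_sp_1 = __builtin_stack_save ();
     ...                                      <- no calls or asm
     __builtin_stack_restore (saved_sp_1);
     ...                                      <- no calls or asm
     __builtin_stack_restore (saved_sp_2);

   The first restore is redundant and is deleted here; if saved_sp_1
   then has a single use, the paired __builtin_stack_save is replaced
   by a null pointer constant and goes away as well.  */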
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
                                 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
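
/* Illustration: on a target whose va_list is a plain 'void *' or
   'char *' pointer and that has no special expand_builtin_va_start
   hook, the rewrites above turn

     __builtin_va_start (&ap, 0);   into   ap = __builtin_next_arg (0);
     __builtin_va_copy (&d, s);     into   d = s;  (with conversions)

   and delete __builtin_va_end (&ap) outright.  */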
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee, result;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }
          fcode = DECL_FUNCTION_CODE (callee);

          result = gimple_fold_builtin (stmt);

          if (result)
            gimple_remove_stmt_histograms (cfun, stmt);

          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (i);
                if (result)
                  break;
                gsi_next (&i);
                continue;

              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
                result = optimize_stdarg_builtin (stmt);
                if (result)
                  break;
                /* FALLTHRU */

              default:
                gsi_next (&i);
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          old_stmt = stmt;
          if (!update_call_from_tree (&i, result))
            {
              gimplify_and_update_call_from_tree (&i, result);
              todoflags |= TODO_update_address_taken;
            }

          stmt = gsi_stmt (i);
          update_stmt (stmt);

          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin, there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};