/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* Conditional constant propagation.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
#include "coretypes.h"
#include "langhooks.h"

/* These RTL headers are needed for basic-block.h.  */
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
/* Possible lattice values.  */

/* Use the TREE_VISITED bitflag to mark statements and PHI nodes that have
   been deemed VARYING and shouldn't be simulated again.  */
#define DONT_SIMULATE_AGAIN(T)  TREE_VISITED (T)
/* Main structure for CCP.  Contains the lattice value and, if it's a
   constant, the constant value.  */
typedef struct
{
  latticevalue lattice_val;
  tree const_val;
} value;
/* A bitmap to keep track of executable blocks in the CFG.  */
static sbitmap executable_blocks;

/* Array of control flow edges on the worklist.  */
static GTY(()) varray_type cfg_blocks = NULL;

static unsigned int cfg_blocks_num = 0;
static int cfg_blocks_tail;
static int cfg_blocks_head;

static sbitmap bb_in_list;

/* This is used to track the current value of each variable.  */
static value *value_vector;
/* Worklist of SSA edges which will need reexamination as their definition
   has changed.  SSA edges are def-use edges in the SSA web.  For each
   edge, we store the definition statement or PHI node D.  The destination
   nodes that need to be visited are accessed using immediate_uses (D).  */
static GTY(()) varray_type ssa_edges;

/* Identical to SSA_EDGES.  For performance reasons, the list of SSA
   edges is split into two.  One contains all SSA edges that need to be
   reexamined because their lattice value changed to varying (this
   worklist), and the other contains all other SSA edges to be
   reexamined (ssa_edges).

   Since most values in the program are varying, the ideal situation
   is to move them to that lattice value as quickly as possible.
   Thus, it doesn't make sense to process any other type of lattice
   value until all varying values are propagated fully, which is one
   thing using the varying worklist achieves.  In addition, if you
   don't use a separate worklist for varying edges, you end up with
   situations where lattice values move from
   undefined->constant->varying instead of undefined->varying.  */
static GTY(()) varray_type varying_ssa_edges;
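
/* For instance, in a hypothetical GIMPLE fragment (the SSA names here
   are made up for illustration):

     x_1 = 4;
     y_2 = x_1 + z_3;    <-- z_3 is VARYING

   y_2 must end up VARYING.  If the use of z_3 is queued on
   VARYING_SSA_EDGES and that worklist is drained first, y_2 moves
   straight from UNDEFINED to VARYING.  If the use of x_1 were
   processed first instead, y_2 could transition
   UNDEFINED->CONSTANT->VARYING and cost an extra simulation round.  */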
static void initialize (void);
static void finalize (void);
static void visit_phi_node (tree);
static tree ccp_fold (tree);
static value cp_lattice_meet (value, value);
static void visit_stmt (tree);
static void visit_cond_stmt (tree);
static void visit_assignment (tree);
static void add_var_to_ssa_edges_worklist (tree, value);
static void add_outgoing_control_edges (basic_block);
static void add_control_edge (edge);
static void def_to_varying (tree);
static void set_lattice_value (tree, value);
static void simulate_block (basic_block);
static void simulate_stmt (tree);
static void substitute_and_fold (void);
static value evaluate_stmt (tree);
static void dump_lattice_value (FILE *, const char *, value);
static bool replace_uses_in (tree, bool *);
static latticevalue likely_value (tree);
static tree get_rhs (tree);
static void set_rhs (tree *, tree);
static value *get_value (tree);
static value get_default_value (tree);
static tree ccp_fold_builtin (tree, tree);
static bool get_strlen (tree, tree *, bitmap);
static inline bool cfg_blocks_empty_p (void);
static void cfg_blocks_add (basic_block);
static basic_block cfg_blocks_get (void);
static bool need_imm_uses_for (tree var);
/* Process an SSA edge worklist.  WORKLIST is the SSA edge worklist to
   drain.  This pops statements off the given WORKLIST and processes
   them until there are no more statements on WORKLIST.  */

static void
process_ssa_edge_worklist (varray_type *worklist)
{
  /* Drain the entire worklist.  */
  while (VARRAY_ACTIVE_SIZE (*worklist) > 0)
    {
      /* Pull the statement to simulate off the worklist.  */
      tree stmt = VARRAY_TOP_TREE (*worklist);
      stmt_ann_t ann = stmt_ann (stmt);
      VARRAY_POP (*worklist);

      /* visit_stmt can "cancel" reevaluation of some statements.
	 If it does, then in_ccp_worklist will be zero.  */
      if (ann->in_ccp_worklist)
	{
	  ann->in_ccp_worklist = 0;
	  simulate_stmt (stmt);
	}
    }
}
/* Main entry point for SSA Conditional Constant Propagation.  FNDECL is
   the declaration for the function to optimize.

   On exit, VARS_TO_RENAME will contain the symbols that have been exposed by
   the propagation of ADDR_EXPR expressions into pointer dereferences and need
   to be renamed into SSA.

   PHASE indicates which dump file from the DUMP_FILES array to use when
   dumping debugging information.  */

static void
tree_ssa_ccp (void)
{
  initialize ();

  /* Iterate until the worklists are empty.  */
  while (!cfg_blocks_empty_p ()
	 || VARRAY_ACTIVE_SIZE (ssa_edges) > 0
	 || VARRAY_ACTIVE_SIZE (varying_ssa_edges) > 0)
    {
      if (!cfg_blocks_empty_p ())
	{
	  /* Pull the next block to simulate off the worklist.  */
	  basic_block dest_block = cfg_blocks_get ();
	  simulate_block (dest_block);
	}

      /* In order to move things to varying as quickly as
	 possible, process the VARYING_SSA_EDGES worklist first.  */
      process_ssa_edge_worklist (&varying_ssa_edges);

      /* Now process the SSA_EDGES worklist.  */
      process_ssa_edge_worklist (&ssa_edges);
    }

  /* Now perform substitutions based on the known constant values.  */
  substitute_and_fold ();

  /* Now cleanup any unreachable code.  */
  cleanup_tree_cfg ();

  /* Free allocated memory.  */
  finalize ();

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_referenced_vars (dump_file);
      fprintf (dump_file, "\n\n");
    }
}

static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}
struct tree_opt_pass pass_ccp =
{
  "ccp",				/* name */
  gate_ccp,				/* gate */
  tree_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_rename_vars
    | TODO_ggc_collect | TODO_verify_ssa	/* todo_flags_finish */
};
/* Get the constant value associated with variable VAR.  */

static value *
get_value (tree var)
{
  value *val;

#if defined ENABLE_CHECKING
  if (TREE_CODE (var) != SSA_NAME)
    abort ();
#endif

  val = &value_vector[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}
/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

static void
simulate_block (basic_block block)
{
  tree phi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  */
  for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
    visit_phi_node (phi);

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (!TEST_BIT (executable_blocks, block->index))
    {
      block_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;

      /* Note that we have simulated this block.  */
      SET_BIT (executable_blocks, block->index);

      for (j = bsi_start (block); !bsi_end_p (j); bsi_next (&j))
	visit_stmt (bsi_stmt (j));

      /* We can not predict when abnormal edges will be executed, so
	 once a block is considered executable, we consider any
	 outgoing abnormal edges as executable.

	 At the same time, if this block has only one successor that is
	 reached by non-abnormal edges, then add that successor to the
	 worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      for (e = block->succ; e; e = e->succ_next)
	{
	  if (e->flags & EDGE_ABNORMAL)
	    {
	      add_control_edge (e);
	    }
	  else
	    {
	      normal_edge_count++;
	      normal_edge = e;
	    }
	}

      if (normal_edge_count == 1)
	add_control_edge (normal_edge);
    }
}
/* Follow the def-use edges for statement DEF_STMT and simulate all the
   statements reached by it.  */

static void
simulate_stmt (tree use_stmt)
{
  basic_block use_bb = bb_for_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
      print_generic_stmt (dump_file, use_stmt, dump_flags);
    }

  if (TREE_CODE (use_stmt) == PHI_NODE)
    {
      /* PHI nodes are always visited, regardless of whether or not the
	 destination block is executable.  */
      visit_phi_node (use_stmt);
    }
  else if (TEST_BIT (executable_blocks, use_bb->index))
    {
      /* Otherwise, visit the statement containing the use reached by
	 DEF, only if the destination block is marked executable.  */
      visit_stmt (use_stmt);
    }
}
/* Perform final substitution and folding.  After this pass the program
   should still be in SSA form.  */

static void
substitute_and_fold (void)
{
  basic_block bb;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file,
	     "\nSubstituting constants and folding statements\n\n");

  /* Substitute constants in every statement of every basic block.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree phi;

      /* Propagate our known constants into PHI nodes.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  int i;

	  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
	    {
	      value *new_val;
	      use_operand_p orig_p = PHI_ARG_DEF_PTR (phi, i);
	      tree orig = USE_FROM_PTR (orig_p);

	      if (! SSA_VAR_P (orig))
		break;

	      new_val = get_value (orig);
	      if (new_val->lattice_val == CONSTANT
		  && may_propagate_copy (orig, new_val->const_val))
		SET_USE (orig_p, new_val->const_val);
	    }
	}

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  bool replaced_address;
	  tree stmt = bsi_stmt (i);

	  /* Skip statements that have been folded already.  */
	  if (stmt_modified_p (stmt) || !is_exec_stmt (stmt))
	    continue;

	  /* Replace the statement with its folded version and mark it
	     folded.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Line %d: replaced ", get_lineno (stmt));
	      print_generic_stmt (dump_file, stmt, TDF_SLIM);
	    }

	  if (replace_uses_in (stmt, &replaced_address))
	    {
	      bool changed = fold_stmt (bsi_stmt_ptr (i));
	      stmt = bsi_stmt (i);
	      modify_stmt (stmt);
	      /* If we folded a builtin function, we'll likely
		 need to rename VDEFs.  */
	      if (replaced_address || changed)
		mark_new_vars_to_rename (stmt, vars_to_rename);
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " with ");
	      print_generic_stmt (dump_file, stmt, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }
	}
    }
}
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling cp_lattice_meet() with all the arguments
   of the PHI node that are incoming via executable edges.  */

static void
visit_phi_node (tree phi)
{
  bool short_circuit = 0;
  value phi_val, *curr_val;
  int i;

  /* If the PHI node has already been deemed to be VARYING, don't simulate
     it again.  */
  if (DONT_SIMULATE_AGAIN (phi))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_generic_expr (dump_file, phi, dump_flags);
    }

  curr_val = get_value (PHI_RESULT (phi));
  switch (curr_val->lattice_val)
    {
    case VARYING:
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "\n   Shortcircuit. Default of VARYING.");
      short_circuit = 1;
      break;

    case CONSTANT:
      phi_val = *curr_val;
      break;

    case UNDEFINED:
    case UNINITIALIZED:
      phi_val.lattice_val = UNDEFINED;
      phi_val.const_val = NULL_TREE;
      break;

    default:
      abort ();
    }

  /* If the variable is volatile or the variable is never referenced in a
     real operand, then consider the PHI node VARYING.  */
  if (short_circuit || TREE_THIS_VOLATILE (SSA_NAME_VAR (PHI_RESULT (phi))))
    {
      phi_val.lattice_val = VARYING;
      phi_val.const_val = NULL;
    }
  else
    for (i = 0; i < PHI_NUM_ARGS (phi); i++)
      {
	/* Compute the meet operator over all the PHI arguments.  */
	edge e = PHI_ARG_EDGE (phi, i);

	if (dump_file && (dump_flags & TDF_DETAILS))
	  {
	    fprintf (dump_file,
		     "\n    Argument #%d (%d -> %d %sexecutable)\n",
		     i, e->src->index, e->dest->index,
		     (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	  }

	/* If the incoming edge is executable, compute the meet operator for
	   the existing value of the PHI node and the current PHI argument.  */
	if (e->flags & EDGE_EXECUTABLE)
	  {
	    tree rdef = PHI_ARG_DEF (phi, i);
	    value *rdef_val, val;

	    if (is_gimple_min_invariant (rdef))
	      {
		val.lattice_val = CONSTANT;
		val.const_val = rdef;
		rdef_val = &val;
	      }
	    else
	      rdef_val = get_value (rdef);

	    phi_val = cp_lattice_meet (phi_val, *rdef_val);

	    if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "\t");
		print_generic_expr (dump_file, rdef, dump_flags);
		dump_lattice_value (dump_file, "\tValue: ", *rdef_val);
		fprintf (dump_file, "\n");
	      }

	    if (phi_val.lattice_val == VARYING)
	      break;
	  }
      }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", phi_val);
      fprintf (dump_file, "\n\n");
    }

  set_lattice_value (PHI_RESULT (phi), phi_val);
  if (phi_val.lattice_val == VARYING)
    DONT_SIMULATE_AGAIN (phi) = 1;
}
/* Compute the meet operator between VAL1 and VAL2:

		any M UNDEFINED = any
		any M VARYING   = VARYING
		Ci  M Cj        = Ci      if (i == j)
		Ci  M Cj        = VARYING if (i != j)  */

static value
cp_lattice_meet (value val1, value val2)
{
  value result;

  /* any M UNDEFINED = any.  */
  if (val1.lattice_val == UNDEFINED)
    return val2;
  else if (val2.lattice_val == UNDEFINED)
    return val1;

  /* any M VARYING = VARYING.  */
  if (val1.lattice_val == VARYING || val2.lattice_val == VARYING)
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
      return result;
    }

  /* Ci M Cj = Ci if (i == j)
     Ci M Cj = VARYING if (i != j)  */
  if (simple_cst_equal (val1.const_val, val2.const_val) == 1)
    {
      result.lattice_val = CONSTANT;
      result.const_val = val1.const_val;
    }
  else
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
    }

  return result;
}
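
/* A worked example of the meet rules above (the constants are
   hypothetical):

     meet (UNDEFINED,  CONSTANT 4) = CONSTANT 4
     meet (CONSTANT 4, CONSTANT 4) = CONSTANT 4
     meet (CONSTANT 4, CONSTANT 5) = VARYING
     meet (CONSTANT 4, VARYING)    = VARYING

   So a PHI node such as x_4 = PHI <x_2, x_3> yields CONSTANT only when
   every executable incoming argument carries the same constant.  */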
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, do the following:

   - If the statement is an assignment, add all the SSA edges starting at
     this definition.

   - If the statement is a conditional branch:
	. If the statement evaluates to non-constant, add all edges to
	  worklist.
	. If the statement is constant, add the edge executed as the
	  result of the branch.  */

static void
visit_stmt (tree stmt)
{
  size_t i;
  stmt_ann_t ann;
  def_optype defs;
  v_may_def_optype v_may_defs;
  v_must_def_optype v_must_defs;

  /* If the statement has already been deemed to be VARYING, don't simulate
     it again.  */
  if (DONT_SIMULATE_AGAIN (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement: ");
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  ann = stmt_ann (stmt);

  /* If this statement is already in the worklist then "cancel" it.  The
     reevaluation implied by the worklist entry will produce the same
     value we generate here and thus reevaluating it again from the
     worklist is pointless.  */
  if (ann->in_ccp_worklist)
    ann->in_ccp_worklist = 0;

  /* Now examine the statement.  If the statement is an assignment that
     produces a single output value, evaluate its RHS to see if the lattice
     value of its output has changed.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR
      && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME)
    visit_assignment (stmt);

  /* Definitions made by statements other than assignments to SSA_NAMEs
     represent unknown modifications to their outputs.  Mark them VARYING.  */
  else if (NUM_DEFS (defs = DEF_OPS (ann)) != 0)
    {
      DONT_SIMULATE_AGAIN (stmt) = 1;
      for (i = 0; i < NUM_DEFS (defs); i++)
	{
	  tree def = DEF_OP (defs, i);
	  def_to_varying (def);
	}
    }

  /* If STMT is a conditional branch, see if we can determine which branch
     will be taken.  */
  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
    visit_cond_stmt (stmt);

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  else
    {
      DONT_SIMULATE_AGAIN (stmt) = 1;

      /* If STMT is a computed goto, then mark all the output edges
	 executable.  */
      if (computed_goto_p (stmt))
	add_outgoing_control_edges (bb_for_stmt (stmt));
    }

  /* Mark all V_MAY_DEF operands VARYING.  */
  v_may_defs = V_MAY_DEF_OPS (ann);
  for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
    def_to_varying (V_MAY_DEF_RESULT (v_may_defs, i));

  /* Mark all V_MUST_DEF operands VARYING.  */
  v_must_defs = V_MUST_DEF_OPS (ann);
  for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
    def_to_varying (V_MUST_DEF_OP (v_must_defs, i));
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS.  */

static void
visit_assignment (tree stmt)
{
  value val;
  tree lhs, rhs;

  lhs = TREE_OPERAND (stmt, 0);
  rhs = TREE_OPERAND (stmt, 1);

  if (TREE_THIS_VOLATILE (SSA_NAME_VAR (lhs)))
    {
      /* Volatile variables are always VARYING.  */
      val.lattice_val = VARYING;
      val.const_val = NULL_TREE;
    }
  else if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* For a simple copy operation, we copy the lattice values.  */
      value *nval = get_value (rhs);
      val = *nval;
    }
  else
    {
      /* Evaluate the statement.  */
      val = evaluate_stmt (stmt);
    }

  /* FIXME: Hack.  If this was a definition of a bitfield, we need to widen
     the constant value into the type of the destination variable.  This
     should not be necessary if GCC represented bitfields properly.  */
  {
    tree lhs = TREE_OPERAND (stmt, 0);
    if (val.lattice_val == CONSTANT
	&& TREE_CODE (lhs) == COMPONENT_REF
	&& DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
      {
	tree w = widen_bitfield (val.const_val, TREE_OPERAND (lhs, 1), lhs);

	if (w && is_gimple_min_invariant (w))
	  val.const_val = w;
	else
	  {
	    val.lattice_val = VARYING;
	    val.const_val = NULL;
	  }
      }
  }

  /* Set the lattice value of the statement's output.  */
  set_lattice_value (lhs, val);
  if (val.lattice_val == VARYING)
    DONT_SIMULATE_AGAIN (stmt) = 1;
}
/* Visit the conditional statement STMT.  If it evaluates to a constant value,
   mark outgoing edges appropriately.  */

static void
visit_cond_stmt (tree stmt)
{
  edge e;
  value val;
  basic_block block;

  block = bb_for_stmt (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically, add
     all outgoing edges from BLOCK.  */
  e = find_taken_edge (block, val.const_val);
  if (e)
    add_control_edge (e);
  else
    {
      DONT_SIMULATE_AGAIN (stmt) = 1;
      add_outgoing_control_edges (block);
    }
}
/* Add all the edges coming out of BB to the control flow worklist.  */

static void
add_outgoing_control_edges (basic_block bb)
{
  edge e;

  for (e = bb->succ; e; e = e->succ_next)
    add_control_edge (e);
}
/* Add edge E to the control flow worklist.  */

static void
add_control_edge (edge e)
{
  basic_block bb = e->dest;
  if (bb == EXIT_BLOCK_PTR)
    return;

  /* If the edge had already been executed, skip it.  */
  if (e->flags & EDGE_EXECUTABLE)
    return;

  e->flags |= EDGE_EXECUTABLE;

  /* If the block is already in the list, we're done.  */
  if (TEST_BIT (bb_in_list, bb->index))
    return;

  cfg_blocks_add (bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Adding Destination of edge (%d -> %d) to worklist\n\n",
	     e->src->index, e->dest->index);
}
/* CCP specific front-end to the non-destructive constant folding routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS.  */

static tree
ccp_fold (tree stmt)
{
  tree rhs = get_rhs (stmt);
  enum tree_code code = TREE_CODE (rhs);
  int kind = TREE_CODE_CLASS (code);
  tree retval = NULL_TREE;

  /* If the RHS is just a variable, then that variable must now have
     a constant value that we can return directly.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    return get_value (rhs)->const_val;

  /* Unary operators.  Note that we know the single operand must
     be a constant.  So this should almost always return a
     simplified RHS.  */
  if (kind == '1')
    {
      /* Handle unary operators which can appear in GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);

      /* Simplify the operand down to a constant.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  value *val = get_value (op0);
	  if (val->lattice_val == CONSTANT)
	    op0 = get_value (op0)->const_val;
	}

      retval = nondestructive_fold_unary_to_constant (code,
						      TREE_TYPE (rhs),
						      op0);

      /* If we folded, but did not create an invariant, then we can not
	 use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
	return NULL;

      /* If we could not fold the expression, but the arguments are all
	 constants and gimple values, then build and return the new
	 expression.

	 In some cases the new expression is still something we can
	 use as a replacement for an argument.  This happens with
	 NOP conversions of types for example.

	 In other cases the new expression can not be used as a
	 replacement for an argument (as it would create non-gimple
	 code).  But the new expression can still be used to derive
	 other constants.  */
      if (! retval && is_gimple_min_invariant (op0))
	return build1 (code, TREE_TYPE (rhs), op0);
    }

  /* Binary and comparison operators.  We know one or both of the
     operands are constants.  */
  else if (kind == '2'
	   || kind == '<'
	   || code == TRUTH_AND_EXPR
	   || code == TRUTH_OR_EXPR
	   || code == TRUTH_XOR_EXPR)
    {
      /* Handle binary and comparison operators that can appear in
	 GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);
      tree op1 = TREE_OPERAND (rhs, 1);

      /* Simplify the operands down to constants when appropriate.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  value *val = get_value (op0);
	  if (val->lattice_val == CONSTANT)
	    op0 = val->const_val;
	}

      if (TREE_CODE (op1) == SSA_NAME)
	{
	  value *val = get_value (op1);
	  if (val->lattice_val == CONSTANT)
	    op1 = val->const_val;
	}

      retval = nondestructive_fold_binary_to_constant (code,
						       TREE_TYPE (rhs),
						       op0, op1);

      /* If we folded, but did not create an invariant, then we can not
	 use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
	return NULL;

      /* If we could not fold the expression, but the arguments are all
	 constants and gimple values, then build and return the new
	 expression.

	 In some cases the new expression is still something we can
	 use as a replacement for an argument.  This happens with
	 NOP conversions of types for example.

	 In other cases the new expression can not be used as a
	 replacement for an argument (as it would create non-gimple
	 code).  But the new expression can still be used to derive
	 other constants.  */
      if (! retval
	  && is_gimple_min_invariant (op0)
	  && is_gimple_min_invariant (op1))
	return build (code, TREE_TYPE (rhs), op0, op1);
    }

  /* We may be able to fold away calls to builtin functions if their
     arguments are constants.  */
  else if (code == CALL_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
	   && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
	       == FUNCTION_DECL)
	   && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
    {
      use_optype uses = STMT_USE_OPS (stmt);
      if (NUM_USES (uses) != 0)
	{
	  tree *orig;
	  size_t i;

	  /* Preserve the original values of every operand.  */
	  orig = xmalloc (sizeof (tree) * NUM_USES (uses));
	  for (i = 0; i < NUM_USES (uses); i++)
	    orig[i] = USE_OP (uses, i);

	  /* Substitute operands with their values and try to fold.  */
	  replace_uses_in (stmt, NULL);
	  retval = fold_builtin (rhs);

	  /* Restore operands to their original form.  */
	  for (i = 0; i < NUM_USES (uses); i++)
	    SET_USE_OP (uses, i, orig[i]);
	  free (orig);
	}
    }
  else
    return rhs;

  /* If we got a simplified form, see if we need to convert its type.  */
  if (retval)
    {
      if (TREE_TYPE (retval) != TREE_TYPE (rhs))
	retval = fold_convert (TREE_TYPE (rhs), retval);

      if (TREE_TYPE (retval) == TREE_TYPE (rhs))
	return retval;
    }

  /* No simplification was possible.  */
  return rhs;
}
/* Evaluate statement STMT.  */

static value
evaluate_stmt (tree stmt)
{
  value val;
  tree simplified;
  latticevalue likelyvalue = likely_value (stmt);

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    simplified = get_rhs (stmt);
  /* Otherwise the statement is likely to have an UNDEFINED value and
     there will be nothing to do.  */
  else
    simplified = NULL_TREE;

  if (simplified && is_gimple_min_invariant (simplified))
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.const_val = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had undefined operands, then the result of the statement should
	 be undefined.  Else the result of the statement is VARYING.  */
      val.lattice_val = (likelyvalue == UNDEFINED ? UNDEFINED : VARYING);
      val.const_val = NULL_TREE;
    }

  return val;
}
/* Debugging dumps.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, value val)
{
  switch (val.lattice_val)
    {
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.const_val, dump_flags);
      break;
    default:
      abort ();
    }
}
/* Given a constant value VAL for bitfield FIELD, and a destination
   variable VAR, return VAL appropriately widened to fit into VAR.  If
   FIELD is wider than HOST_WIDE_INT, NULL is returned.  */

tree
widen_bitfield (tree val, tree field, tree var)
{
  unsigned var_size, field_size;
  tree wide_val;
  unsigned HOST_WIDE_INT mask;
  unsigned i;

  var_size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE ((var))));
  field_size = TREE_INT_CST_LOW (DECL_SIZE (field));

  /* Give up if either the bitfield or the variable are too wide.  */
  if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
    return NULL;

#if defined ENABLE_CHECKING
  if (var_size < field_size)
    abort ();
#endif

  /* If VAL is not an integer constant, then give up.  */
  if (TREE_CODE (val) != INTEGER_CST)
    return NULL;

  /* If the sign bit of the value is not set, or the field's type is
     unsigned, then just mask off the high order bits of the value.  */
  if ((TREE_INT_CST_LOW (val) & (1 << (field_size - 1))) == 0
      || DECL_UNSIGNED (field))
    {
      /* Zero extension.  Build a mask with the lower 'field_size' bits
	 set and a BIT_AND_EXPR node to clear the high order bits of
	 the value.  */
      for (i = 0, mask = 0; i < field_size; i++)
	mask |= 1 << i;

      wide_val = build (BIT_AND_EXPR, TREE_TYPE (var), val,
			build_int_2 (mask, 0));
    }
  else
    {
      /* Sign extension.  Create a mask with the upper 'field_size'
	 bits set and a BIT_IOR_EXPR to set the high order bits of the
	 value.  */
      for (i = 0, mask = 0; i < (var_size - field_size); i++)
	mask |= 1 << (var_size - i - 1);

      wide_val = build (BIT_IOR_EXPR, TREE_TYPE (var), val,
			build_int_2 (mask, 0));
    }

  return fold (wide_val);
}
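
/* A worked example, assuming a hypothetical 3-bit signed bitfield
   assigned the constant 5 (binary 101) and a 32-bit destination
   variable: field_size is 3, var_size is 32.  The sign bit (1 << 2)
   is set, so we sign extend: the loop builds mask = 0xfffffff8 (the
   upper 29 bits) and the result is fold (val | mask), i.e. the two's
   complement value -3.  Had the field been unsigned, we would instead
   mask with 0x00000007 and keep the value 5.  */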
/* Function indicating whether we ought to include information for 'var'
   when calculating immediate uses.  */

static bool
need_imm_uses_for (tree var)
{
  return get_value (var)->lattice_val != VARYING;
}
/* Initialize local data structures and worklists for CCP.  */

static void
initialize (void)
{
  edge e;
  basic_block bb;
  sbitmap virtual_var;

  /* Worklists of SSA edges.  */
  VARRAY_TREE_INIT (ssa_edges, 20, "ssa_edges");
  VARRAY_TREE_INIT (varying_ssa_edges, 20, "varying_ssa_edges");

  executable_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (executable_blocks);

  bb_in_list = sbitmap_alloc (last_basic_block);
  sbitmap_zero (bb_in_list);

  value_vector = (value *) xmalloc (num_ssa_names * sizeof (value));
  memset (value_vector, 0, num_ssa_names * sizeof (value));

  /* 1 if ssa variable is used in a virtual variable context.  */
  virtual_var = sbitmap_alloc (num_ssa_names);
  sbitmap_zero (virtual_var);

  /* Initialize default values and simulation flags for PHI nodes, statements
     and edges.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree stmt;
      stmt_ann_t ann;
      def_optype defs;
      v_may_def_optype v_may_defs;
      v_must_def_optype v_must_defs;
      size_t x;
      int vary;

      /* Get the default value for each definition.  */
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  vary = 0;
	  stmt = bsi_stmt (i);
	  get_stmt_operands (stmt);
	  ann = stmt_ann (stmt);
	  defs = DEF_OPS (ann);
	  for (x = 0; x < NUM_DEFS (defs); x++)
	    {
	      tree def = DEF_OP (defs, x);
	      if (get_value (def)->lattice_val == VARYING)
		vary = 1;
	    }
	  DONT_SIMULATE_AGAIN (stmt) = vary;

	  /* Mark all V_MAY_DEF operands VARYING.  */
	  v_may_defs = V_MAY_DEF_OPS (ann);
	  for (x = 0; x < NUM_V_MAY_DEFS (v_may_defs); x++)
	    {
	      tree res = V_MAY_DEF_RESULT (v_may_defs, x);
	      get_value (res)->lattice_val = VARYING;
	      SET_BIT (virtual_var, SSA_NAME_VERSION (res));
	    }

	  /* Mark all V_MUST_DEF operands VARYING.  */
	  v_must_defs = V_MUST_DEF_OPS (ann);
	  for (x = 0; x < NUM_V_MUST_DEFS (v_must_defs); x++)
	    {
	      tree v_must_def = V_MUST_DEF_OP (v_must_defs, x);
	      get_value (v_must_def)->lattice_val = VARYING;
	      SET_BIT (virtual_var, SSA_NAME_VERSION (v_must_def));
	    }
	}

      for (e = bb->succ; e; e = e->succ_next)
	e->flags &= ~EDGE_EXECUTABLE;
    }

  /* Now process PHI nodes.  */
  FOR_EACH_BB (bb)
    {
      tree phi, var;
      int x;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  value *val;
	  val = get_value (PHI_RESULT (phi));
	  if (val->lattice_val != VARYING)
	    {
	      for (x = 0; x < PHI_NUM_ARGS (phi); x++)
		{
		  var = PHI_ARG_DEF (phi, x);
		  /* If one argument is virtual, the result is virtual, and
		     therefore varying.  */
		  if (TREE_CODE (var) == SSA_NAME)
		    {
		      if (TEST_BIT (virtual_var, SSA_NAME_VERSION (var)))
			{
			  val->lattice_val = VARYING;
			  SET_BIT (virtual_var,
				   SSA_NAME_VERSION (PHI_RESULT (phi)));
			  break;
			}
		    }
		}
	    }
	  DONT_SIMULATE_AGAIN (phi) = ((val->lattice_val == VARYING) ? 1 : 0);
	}
    }

  sbitmap_free (virtual_var);
  /* Compute immediate uses for variables we care about.  */
  compute_immediate_uses (TDFA_USE_OPS, need_imm_uses_for);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_immediate_uses (dump_file);

  VARRAY_BB_INIT (cfg_blocks, 20, "cfg_blocks");

  /* Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      if (e->dest != EXIT_BLOCK_PTR)
	{
	  e->flags |= EDGE_EXECUTABLE;
	  cfg_blocks_add (e->dest);
	}
    }
}
/* Free allocated storage.  */

static void
finalize (void)
{
  ssa_edges = NULL;
  varying_ssa_edges = NULL;
  free (value_vector);
  sbitmap_free (bb_in_list);
  sbitmap_free (executable_blocks);
  free_df ();
}
/* Is the block worklist empty?  */

static inline bool
cfg_blocks_empty_p (void)
{
  return (cfg_blocks_num == 0);
}
/* Add a basic block to the worklist.  */

static void
cfg_blocks_add (basic_block bb)
{
  if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
    return;

  if (TEST_BIT (bb_in_list, bb->index))
    return;

  if (cfg_blocks_empty_p ())
    {
      cfg_blocks_tail = cfg_blocks_head = 0;
      cfg_blocks_num = 1;
    }
  else
    {
      cfg_blocks_num++;
      if (cfg_blocks_num > VARRAY_SIZE (cfg_blocks))
	{
	  /* We have to grow the array now.  Adjust the queue to occupy the
	     full space of the original array.  */
	  cfg_blocks_tail = VARRAY_SIZE (cfg_blocks);
	  cfg_blocks_head = 0;
	  VARRAY_GROW (cfg_blocks, 2 * VARRAY_SIZE (cfg_blocks));
	}
      else
	cfg_blocks_tail = (cfg_blocks_tail + 1) % VARRAY_SIZE (cfg_blocks);
    }

  VARRAY_BB (cfg_blocks, cfg_blocks_tail) = bb;
  SET_BIT (bb_in_list, bb->index);
}
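
/* The worklist above is a circular queue kept in a varray.  As a
   hypothetical example, with VARRAY_SIZE == 4, head == 2 and
   tail == 3, the next insertion wraps tail to (3 + 1) % 4 == 0.
   Only when cfg_blocks_num would exceed the capacity is the varray
   doubled, with the queue re-based to occupy slots 0 .. old size.  */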
/* Remove a block from the worklist.  */

static basic_block
cfg_blocks_get (void)
{
  basic_block bb;

  bb = VARRAY_BB (cfg_blocks, cfg_blocks_head);

#ifdef ENABLE_CHECKING
  if (cfg_blocks_empty_p () || !bb)
    abort ();
#endif

  cfg_blocks_head = (cfg_blocks_head + 1) % VARRAY_SIZE (cfg_blocks);
  --cfg_blocks_num;
  RESET_BIT (bb_in_list, bb->index);

  return bb;
}
/* We have just defined a new value for VAR.  Add all immediate uses
   of VAR to the ssa_edges or varying_ssa_edges worklist.  */

static void
add_var_to_ssa_edges_worklist (tree var, value val)
{
  tree stmt = SSA_NAME_DEF_STMT (var);
  dataflow_t df = get_immediate_uses (stmt);
  int num_uses = num_immediate_uses (df);
  int i;

  for (i = 0; i < num_uses; i++)
    {
      tree use = immediate_use (df, i);

      if (!DONT_SIMULATE_AGAIN (use))
	{
	  stmt_ann_t ann = stmt_ann (use);
	  if (ann->in_ccp_worklist == 0)
	    {
	      ann->in_ccp_worklist = 1;
	      if (val.lattice_val == VARYING)
		VARRAY_PUSH_TREE (varying_ssa_edges, use);
	      else
		VARRAY_PUSH_TREE (ssa_edges, use);
	    }
	}
    }
}
/* Set the lattice value for the variable VAR to VARYING.  */

static void
def_to_varying (tree var)
{
  value val;
  val.lattice_val = VARYING;
  val.const_val = NULL_TREE;
  set_lattice_value (var, val);
}
/* Set the lattice value for variable VAR to VAL.  */

static void
set_lattice_value (tree var, value val)
{
  value *old = get_value (var);

#ifdef ENABLE_CHECKING
  if (val.lattice_val == UNDEFINED)
    {
      /* CONSTANT->UNDEFINED is never a valid state transition.  */
      if (old->lattice_val == CONSTANT)
	abort ();

      /* VARYING->UNDEFINED is generally not a valid state transition,
	 except for values which are initialized to VARYING.  */
      if (old->lattice_val == VARYING
	  && get_default_value (var).lattice_val != VARYING)
	abort ();
    }
  else if (val.lattice_val == CONSTANT)
    {
      /* VARYING -> CONSTANT is an invalid state transition, except
	 for objects which start off in a VARYING state.  */
      if (old->lattice_val == VARYING
	  && get_default_value (var).lattice_val != VARYING)
	abort ();
    }
#endif

  /* If the constant for VAR has changed, then this VAR is really varying.  */
  if (old->lattice_val == CONSTANT && val.lattice_val == CONSTANT
      && !simple_cst_equal (old->const_val, val.const_val))
    {
      val.lattice_val = VARYING;
      val.const_val = NULL_TREE;
    }

  if (old->lattice_val != val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file,
			      "Lattice value changed to ", val);
	  fprintf (dump_file, ".  Adding definition to SSA edges.\n");
	}

      add_var_to_ssa_edges_worklist (var, val);
      *old = val;
    }
}
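
/* To illustrate the checks above with hypothetical values: once x_1
   has been published as CONSTANT 4, a later evaluation producing
   CONSTANT 5 is not treated as a new constant; the value is demoted
   to VARYING, since lattice values may only move downward
   (UNDEFINED -> CONSTANT -> VARYING), never back up.  */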
/* Replace USE references in statement STMT with their immediate reaching
   definition.  Return true if at least one reference was replaced.  If
   REPLACED_ADDRESSES_P is given, it will be set to true if an address
   constant was replaced.  */

static bool
replace_uses_in (tree stmt, bool *replaced_addresses_p)
{
  bool replaced = false;
  use_optype uses;
  size_t i;

  if (replaced_addresses_p)
    *replaced_addresses_p = false;

  get_stmt_operands (stmt);

  uses = STMT_USE_OPS (stmt);
  for (i = 0; i < NUM_USES (uses); i++)
    {
      use_operand_p use = USE_OP_PTR (uses, i);
      value *val = get_value (USE_FROM_PTR (use));

      if (val->lattice_val == CONSTANT)
	{
	  SET_USE (use, val->const_val);
	  replaced = true;
	  if (POINTER_TYPE_P (TREE_TYPE (USE_FROM_PTR (use)))
	      && replaced_addresses_p)
	    *replaced_addresses_p = true;
	}
    }

  return replaced;
}
/* Return the likely latticevalue for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if any operands of STMT are undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static latticevalue
likely_value (tree stmt)
{
  use_optype uses;
  size_t i;
  int found_constant = 0;
  stmt_ann_t ann;

  /* If the statement makes aliased loads or has volatile operands, it
     won't fold to a constant value.  */
  ann = stmt_ann (stmt);
  if (ann->makes_aliased_loads || ann->has_volatile_ops)
    return VARYING;

  /* A CALL_EXPR is assumed to be varying.  This may be overly conservative,
     in the presence of const and pure calls.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return VARYING;

  get_stmt_operands (stmt);

  uses = USE_OPS (ann);
  for (i = 0; i < NUM_USES (uses); i++)
    {
      tree use = USE_OP (uses, i);
      value *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	return UNDEFINED;

      if (val->lattice_val == CONSTANT)
	found_constant = 1;
    }

  return ((found_constant || !uses) ? CONSTANT : VARYING);
}
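
/* For example (hypothetical SSA names): given 'x_5 = y_2 + 1' where
   y_2 is known CONSTANT, likely_value returns CONSTANT and
   evaluate_stmt will call ccp_fold; given 'x_5 = y_2 + z_3' where z_3
   is still UNDEFINED, it returns UNDEFINED and no folding is
   attempted yet.  */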
/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;
  tree elt_size, min_idx, idx;
  tree array_type, elt_type;

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!lang_hooks.types_compatible_p (orig_type, elt_type))
    return NULL_TREE;

  /* Whee.  Ignore indexing of variable sized types.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (TREE_CODE (elt_size) != INTEGER_CST)
    return NULL_TREE;

  /* If the division isn't exact, then don't do anything.  Equally
     invalid as the above indexing of non-array variables.  */
  if (div_and_round_double (TRUNC_DIV_EXPR, 1,
			    TREE_INT_CST_LOW (offset),
			    TREE_INT_CST_HIGH (offset),
			    TREE_INT_CST_LOW (elt_size),
			    TREE_INT_CST_HIGH (elt_size),
			    &lquo, &hquo, &lrem, &hrem)
      || lrem || hrem)
    return NULL_TREE;
  idx = build_int_2_wide (lquo, hquo);

  /* Re-bias the index by the min index of the array type.  */
  min_idx = TYPE_DOMAIN (TREE_TYPE (base));
  if (min_idx)
    {
      min_idx = TYPE_MIN_VALUE (min_idx);
      if (min_idx)
	{
	  idx = convert (TREE_TYPE (min_idx), idx);
	  if (!integer_zerop (min_idx))
	    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 1);
	}
    }

  return build (ARRAY_REF, orig_type, base, idx);
}
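
/* For instance, with a hypothetical declaration 'int a[10]' (so
   elt_size is 4), a base of 'a', an offset of 8 and orig_type of
   'int', the division is exact (lquo == 2, lrem == 0) and we build
   the reference a[2].  An offset of 6 would leave lrem != 0 and the
   function would return NULL_TREE.  */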
/* A subroutine of fold_stmt_r.  Attempts to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
/* ??? This doesn't handle class inheritance.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (lang_hooks.types_compatible_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;
      if (TREE_CODE (DECL_FIELD_OFFSET (f)) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (DECL_FIELD_OFFSET (f), offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);
      if (cmp < 0)
	{
	  /* Don't care about offsets into the middle of scalars.  */
	  if (!AGGREGATE_TYPE_P (field_type))
	    continue;

	  /* Check for array at the end of the struct.  This is often
	     used as for flexible array members.  We should be able to
	     turn this into an array access anyway.  */
	  if (TREE_CODE (field_type) == ARRAY_TYPE)
	    tail_array_field = f;

	  /* Check the end of the field against the offset.  */
	  if (!DECL_SIZE_UNIT (f)
	      || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	    continue;
	  t = int_const_binop (MINUS_EXPR, offset, DECL_FIELD_OFFSET (f), 1);
	  if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	    continue;

	  /* If we matched, then set offset to the displacement into
	     this field.  */
	  offset = t;
	  goto found;
	}

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      else if (lang_hooks.types_compatible_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build (COMPONENT_REF, field_type, base, f);
	  return t;
	}

      /* Don't care about type-punning of scalars.  */
      else if (!AGGREGATE_TYPE_P (field_type))
	return NULL_TREE;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);

 found:
  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build (COMPONENT_REF, field_type, base, f);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}
/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a PLUS_EXPR.  */
  if (TREE_CODE (base) == PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Try folding *(&B+O) to B[X].  */
      t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr));
      if (t)
	return t;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset,
					      TREE_TYPE (expr), false);
      if (t)
	return t;

      /* Fold *&B to B.  */
      if (integer_zerop (offset))
	return base;
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we abort in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)),
						  base, offset,
						  TREE_TYPE (expr), true);
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
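
/* Putting the cases above together on hypothetical input: *(&b + 0)
   folds to plain 'b'; *(&a + 8) with 'int a[10]' becomes a[2] via
   maybe_fold_offset_to_array_ref; and *(p + 4) with 'struct s *p'
   can become p->f when a field f sits at offset 4.  */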
/* A subroutine of fold_stmt_r.  EXPR is a PLUS_EXPR.

   A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where the cast doesn't actually exist, but is implicit in the
   type of the PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */

static tree
maybe_fold_stmt_addition (tree expr)
{
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  tree ptr_type = TREE_TYPE (expr);
  tree ptd_type;
  tree t;
  bool subtract = (TREE_CODE (expr) == MINUS_EXPR);

  /* We're only interested in pointer arithmetic.  */
  if (!POINTER_TYPE_P (ptr_type))
    return NULL_TREE;
  /* Canonicalize the integral operand to op1.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
    {
      if (subtract)
	return NULL_TREE;
      t = op0, op0 = op1, op1 = t;
    }
  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      array_idx = convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      /* If we're manipulating unsigned types, then folding into negative
	 values can produce incorrect results.  Particularly if the type
	 is smaller than the width of the pointer.  */
      if (subtract
	  && TYPE_UNSIGNED (TREE_TYPE (op1))
	  && tree_int_cst_lt (array_idx, op1))
	break;
      op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
			     array_idx, op1, 0);
      subtract = false;
      op0 = array_obj;
    }

  /* If we weren't able to fold the subtraction into another array reference,
     canonicalize the integer for passing to the array and component ref
     simplification functions.  */
  if (subtract)
    {
      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
	return NULL;
      op1 = fold (build1 (NEGATE_EXPR, TREE_TYPE (op1), op1));
      /* ??? In theory fold should always produce another integer.  */
      if (TREE_CODE (op1) != INTEGER_CST)
	return NULL;
    }

  ptd_type = TREE_TYPE (ptr_type);

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
					    ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, ptr_type, t);

  return t;
}
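
/* As a hypothetical example of the transformation described above:
   for '(int *)(&a + 8)' with 'int a[10]', op0 becomes 'a', op1 stays
   8, and the call to maybe_fold_offset_to_array_ref yields a[2], so
   the result is the address expression &a[2], which may propagate
   further.  */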
/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  bool *changed_p = data;
  tree expr = *expr_p, t;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
				    integer_zero_node);
      break;

      /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF.
	 We'd only want to bother decomposing an existing ARRAY_REF if
	 the base array is found to have another offset contained within.
	 Otherwise we'd be wasting time.  */

    case ADDR_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Set TREE_INVARIANT properly so that the value is properly
	 considered constant, and so gets propagated as expected.  */
      if (*changed_p)
	recompute_tree_invarant_for_addr_expr (expr);
      return NULL_TREE;

    case PLUS_EXPR:
    case MINUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (expr);
      break;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on
	 the lhs.  In cases with IMA it is possible that it came
	 from another, equivalent type at this point.  We have
	 already checked the equivalence in this case.
	 Match on type plus offset, to allow for unnamed fields.
	 We won't necessarily get the corresponding field for
	 unions; this is believed to be harmless.  */

      if ((current_file_decl && TREE_CHAIN (current_file_decl))
	  && (DECL_FIELD_CONTEXT (TREE_OPERAND (expr, 1)) !=
	      TREE_TYPE (TREE_OPERAND (expr, 0))))
	{
	  tree f;
	  tree orig_field = TREE_OPERAND (expr, 1);
	  tree orig_type = TREE_TYPE (orig_field);
	  for (f = TYPE_FIELDS (TREE_TYPE (TREE_OPERAND (expr, 0)));
	       f; f = TREE_CHAIN (f))
	    {
	      if (lang_hooks.types_compatible_p (TREE_TYPE (f), orig_type)
		  && tree_int_cst_compare (DECL_FIELD_BIT_OFFSET (f),
					   DECL_FIELD_BIT_OFFSET (orig_field))
		     == 0
		  && tree_int_cst_compare (DECL_FIELD_OFFSET (f),
					   DECL_FIELD_OFFSET (orig_field))
		     == 0)
		{
		  TREE_OPERAND (expr, 1) = f;
		  break;
		}
	    }
	  /* Fall through is an error; it will be detected in tree-sra.  */
	}
      break;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
/* Fold the statement pointed by STMT_P.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (tree *stmt_p)
{
  tree rhs, result, stmt;
  bool changed = false;

  stmt = *stmt_p;

  /* If we replaced constants and the statement makes pointer dereferences,
     then we may need to fold instances of *&VAR into VAR, etc.  */
  if (walk_tree (stmt_p, fold_stmt_r, &changed, NULL))
    {
      *stmt_p
	= build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
				    NULL);
      return true;
    }

  rhs = get_rhs (stmt);
  if (!rhs)
    return changed;
  result = NULL_TREE;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (TREE_CODE (rhs) == CALL_EXPR)
    {
      tree callee = get_callee_fndecl (rhs);
      if (callee && DECL_BUILT_IN (callee))
	result = ccp_fold_builtin (stmt, rhs);
    }

  /* If we couldn't fold the RHS, hand over to the generic fold routines.  */
  if (result == NULL_TREE)
    result = fold (rhs);

  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
     may have been added by fold, and "useless" type conversions that might
     now be apparent due to propagation.  */
  STRIP_MAIN_TYPE_NOPS (result);
  STRIP_USELESS_TYPE_CONVERSION (result);

  if (result != rhs)
    {
      changed = true;
      set_rhs (stmt_p, result);
    }

  return changed;
}
/* Get the main expression from statement STMT.  */

tree
get_rhs (tree stmt)
{
  enum tree_code code = TREE_CODE (stmt);

  if (code == MODIFY_EXPR)
    return TREE_OPERAND (stmt, 1);
  if (code == COND_EXPR)
    return COND_EXPR_COND (stmt);
  else if (code == SWITCH_EXPR)
    return SWITCH_COND (stmt);
  else if (code == RETURN_EXPR)
    {
      if (!TREE_OPERAND (stmt, 0))
	return NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
	return TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
      else
	return TREE_OPERAND (stmt, 0);
    }
  else if (code == GOTO_EXPR)
    return GOTO_DESTINATION (stmt);
  else if (code == LABEL_EXPR)
    return LABEL_EXPR_LABEL (stmt);
  else
    return NULL_TREE;
}
/* Set the main expression of *STMT_P to EXPR.  */

void
set_rhs (tree *stmt_p, tree expr)
{
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  if (code == MODIFY_EXPR)
    TREE_OPERAND (stmt, 1) = expr;
  else if (code == COND_EXPR)
    COND_EXPR_COND (stmt) = expr;
  else if (code == SWITCH_EXPR)
    SWITCH_COND (stmt) = expr;
  else if (code == RETURN_EXPR)
    {
      if (TREE_OPERAND (stmt, 0)
	  && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
	TREE_OPERAND (TREE_OPERAND (stmt, 0), 1) = expr;
      else
	TREE_OPERAND (stmt, 0) = expr;
    }
  else if (code == GOTO_EXPR)
    GOTO_DESTINATION (stmt) = expr;
  else if (code == LABEL_EXPR)
    LABEL_EXPR_LABEL (stmt) = expr;
  else
    {
      /* Replace the whole statement with EXPR.  If EXPR has no side
	 effects, then replace *STMT_P with an empty statement.  */
      stmt_ann_t ann = stmt_ann (stmt);
      *stmt_p = TREE_SIDE_EFFECTS (expr) ? expr : build_empty_stmt ();
      (*stmt_p)->common.ann = (tree_ann_t) ann;

      if (TREE_SIDE_EFFECTS (expr))
	{
	  def_optype defs;
	  v_may_def_optype v_may_defs;
	  v_must_def_optype v_must_defs;
	  size_t i;

	  /* Fix all the SSA_NAMEs created by *STMT_P to point to its new
	     replacement.  */
	  defs = DEF_OPS (ann);
	  for (i = 0; i < NUM_DEFS (defs); i++)
	    {
	      tree var = DEF_OP (defs, i);
	      if (TREE_CODE (var) == SSA_NAME)
		SSA_NAME_DEF_STMT (var) = *stmt_p;
	    }

	  v_may_defs = V_MAY_DEF_OPS (ann);
	  for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
	    {
	      tree var = V_MAY_DEF_RESULT (v_may_defs, i);
	      if (TREE_CODE (var) == SSA_NAME)
		SSA_NAME_DEF_STMT (var) = *stmt_p;
	    }

	  v_must_defs = V_MUST_DEF_OPS (ann);
	  for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
	    {
	      tree var = V_MUST_DEF_OP (v_must_defs, i);
	      if (TREE_CODE (var) == SSA_NAME)
		SSA_NAME_DEF_STMT (var) = *stmt_p;
	    }
	}
    }
}
/* Return a default value for variable VAR using the following rules:

   1- Global and static variables are considered VARYING, unless they are
      declared const.

   2- Function arguments are considered VARYING.

   3- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.  */

static value
get_default_value (tree var)
{
  value val;
  tree sym;

  if (TREE_CODE (var) == SSA_NAME)
    sym = SSA_NAME_VAR (var);
  else
    {
#ifdef ENABLE_CHECKING
      if (!DECL_P (var))
	abort ();
#endif
      sym = var;
    }

  val.lattice_val = UNDEFINED;
  val.const_val = NULL_TREE;

  if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
    {
      /* Function arguments and volatile variables are considered VARYING.  */
      val.lattice_val = VARYING;
    }
  else if (decl_function_context (sym) != current_function_decl
	   || TREE_STATIC (sym))
    {
      /* Globals and static variables are considered VARYING, unless they
	 are declared 'const'.  */
      val.lattice_val = VARYING;

      if (TREE_READONLY (sym)
	  && DECL_INITIAL (sym)
	  && is_gimple_min_invariant (DECL_INITIAL (sym)))
	{
	  val.lattice_val = CONSTANT;
	  val.const_val = DECL_INITIAL (sym);
	}
    }
  else
    {
      enum tree_code code;
      tree stmt = SSA_NAME_DEF_STMT (var);

      if (!IS_EMPTY_STMT (stmt))
	{
	  code = TREE_CODE (stmt);
	  if (code != MODIFY_EXPR && code != PHI_NODE)
	    val.lattice_val = VARYING;
	}
    }

  return val;
}
/* Fold builtin call FN in statement STMT.  If it cannot be folded into a
   constant, return NULL_TREE.  Otherwise, return its constant value.  */

static tree
ccp_fold_builtin (tree stmt, tree fn)
{
  tree result, strlen_val[2];
  tree arglist = TREE_OPERAND (fn, 1), a;
  tree callee = get_callee_fndecl (fn);
  bitmap visited;
  int strlen_arg, i;

  /* Ignore MD builtins.  */
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_builtin (fn);
  if (result)
    return result;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  if (!arglist)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      strlen_arg = 1;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      strlen_arg = 2;
      break;
    default:
      return NULL_TREE;
    }

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_XMALLOC ();

  memset (strlen_val, 0, sizeof (strlen_val));
  for (i = 0, a = arglist;
       strlen_arg;
       i++, strlen_arg >>= 1, a = TREE_CHAIN (a))
    if (strlen_arg & 1)
      {
	bitmap_clear (visited);
	if (!get_strlen (TREE_VALUE (a), &strlen_val[i], visited))
	  strlen_val[i] = NULL_TREE;
      }

  BITMAP_XFREE (visited);

  /* FIXME.  All this code looks dangerous in the sense that it might
     create non-gimple expressions.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      /* Convert from the internal "sizetype" type to "size_t".  */
      if (strlen_val[0]
	  && size_type_node)
	{
	  tree new = convert (size_type_node, strlen_val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we can not use the result.  */
	  if (is_gimple_val (new)
	      || (is_gimple_cast (new)
		  && is_gimple_val (TREE_OPERAND (new, 0))))
	    return new;
	  else
	    return NULL_TREE;
	}
      return strlen_val[0];
    case BUILT_IN_STRCPY:
      if (strlen_val[1]
	  && is_gimple_val (strlen_val[1]))
	return simplify_builtin_strcpy (arglist, strlen_val[1]);
    case BUILT_IN_STRNCPY:
      if (strlen_val[1]
	  && is_gimple_val (strlen_val[1]))
	return simplify_builtin_strncpy (arglist, strlen_val[1]);
    case BUILT_IN_FPUTS:
      return simplify_builtin_fputs (arglist,
				     TREE_CODE (stmt) != MODIFY_EXPR, 0,
				     strlen_val[0]);
    case BUILT_IN_FPUTS_UNLOCKED:
      return simplify_builtin_fputs (arglist,
				     TREE_CODE (stmt) != MODIFY_EXPR, 1,
				     strlen_val[0]);

    default:
      abort ();
    }

  return NULL_TREE;
}
/* Return the string length of ARG in LENGTH.  If ARG is an SSA name variable,
   follow its use-def chains.  If LENGTH is not NULL and its value is not
   equal to the length we determine, or if we are unable to determine the
   length, return false.  VISITED is a bitmap of visited variables.  */

static bool
get_strlen (tree arg, tree *length, bitmap visited)
{
  tree var, def_stmt, val;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length && simple_cst_equal (val, *length) != 1)
	return false;

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (TREE_CODE (def_stmt))
    {
    case MODIFY_EXPR:
      {
	tree len, rhs;

	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	rhs = TREE_OPERAND (def_stmt, 1);
	STRIP_NOPS (rhs);
	if (TREE_CODE (rhs) == SSA_NAME)
	  return get_strlen (rhs, length, visited);

	/* See if the RHS is a constant length.  */
	len = c_strlen (rhs, 1);
	if (len)
	  {
	    if (*length && simple_cst_equal (len, *length) != 1)
	      return false;

	    *length = len;
	    return true;
	  }

	break;
      }

    case PHI_NODE:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	int i;

	for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
	  {
	    tree arg = PHI_ARG_DEF (def_stmt, i);

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == PHI_RESULT (def_stmt))
	      continue;

	    if (!get_strlen (arg, length, visited))
	      return false;
	  }

	break;
      }

    default:
      break;
    }

  return false;
}
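
/* A hypothetical example of the PHI case above (made-up SSA names):

     s_1 = "abc";
     s_2 = "xyz";
     # s_3 = PHI <s_1, s_2>

   Both arguments have length 3, so get_strlen on s_3 succeeds with
   *length == 3.  If the second argument were "hello" instead, the
   lengths would disagree and the result would be false.  */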
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static void
execute_fold_all_builtins (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree *stmtp = bsi_stmt_ptr (i);
	  tree call = get_rhs (*stmtp);
	  tree callee, result;

	  if (!call || TREE_CODE (call) != CALL_EXPR)
	    continue;
	  callee = get_callee_fndecl (call);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  result = ccp_fold_builtin (*stmtp, call);
	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      default:
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_generic_stmt (dump_file, *stmtp, dump_flags);
	    }

	  set_rhs (stmtp, result);
	  modify_stmt (*stmtp);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_generic_stmt (dump_file, *stmtp, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	}
    }
}
struct tree_opt_pass pass_fold_builtins =
{
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa	/* todo_flags_finish */
};

#include "gt-tree-ssa-ccp.h"