/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* Conditional constant propagation.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "coretypes.h"
#include "langhooks.h"

/* These RTL headers are needed for basic-block.h.  */
#include "hard-reg-set.h"
#include "basic-block.h"

#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"

/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED = 0,
  UNDEFINED,
  UNKNOWN_VAL,
  CONSTANT,
  VARYING
} latticevalue;

/* Use the TREE_VISITED bitflag to mark statements and PHI nodes that have
   been deemed VARYING and shouldn't be simulated again.  */
#define DONT_SIMULATE_AGAIN(T)  TREE_VISITED (T)

/* Main structure for CCP.  Contains the lattice value and, if it's a
   constant, the constant value.  */
typedef struct
{
  latticevalue lattice_val;
  tree const_val;
} value;

/* A bitmap to keep track of executable blocks in the CFG.  */
static sbitmap executable_blocks;

/* Array of control flow edges on the worklist.  */
static GTY(()) varray_type cfg_blocks = NULL;

static unsigned int cfg_blocks_num = 0;
static int cfg_blocks_tail;
static int cfg_blocks_head;

static sbitmap bb_in_list;

/* This is used to track the current value of each variable.  */
static value *value_vector;

/* Worklist of SSA edges which will need reexamination as their definition
   has changed.  SSA edges are def-use edges in the SSA web.  For each
   edge, we store the definition statement or PHI node D.  The destination
   nodes that need to be visited are accessed using immediate_uses (D).  */
static GTY(()) varray_type ssa_edges;

/* Identical to SSA_EDGES.  For performance reasons, the list of SSA
   edges is split into two.  One contains all SSA edges that need to be
   reexamined because their lattice value changed to varying (this
   worklist), and the other contains all other SSA edges to be
   reexamined (ssa_edges).

   Since most values in the program are varying, the ideal situation
   is to move them to that lattice value as quickly as possible.
   Thus, it doesn't make sense to process any other type of lattice
   value until all varying values are propagated fully, which is one
   thing using the varying worklist achieves.  In addition, if you
   don't use a separate worklist for varying edges, you end up with
   situations where lattice values move from
   undefined->constant->varying instead of undefined->varying.  */
static GTY(()) varray_type varying_ssa_edges;
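
/* Illustration (not part of the pass, SSA names hypothetical): consider
   x_3 = x_1 + x_2 where x_1 is still UNDEFINED but x_2 has just been
   marked VARYING.  Draining VARYING_SSA_EDGES first visits x_3's
   statement while x_2 is already VARYING, so x_3 goes straight to
   VARYING.  Interleaving the two worklists could instead visit x_3
   after x_1 becomes CONSTANT, making x_3 take the longer path
   UNDEFINED -> CONSTANT -> VARYING and forcing an extra reevaluation
   of everything downstream of x_3.  */
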
static void initialize (void);
static void finalize (void);
static void visit_phi_node (tree);
static tree ccp_fold (tree);
static value cp_lattice_meet (value, value);
static void visit_stmt (tree);
static void visit_cond_stmt (tree);
static void visit_assignment (tree);
static void add_var_to_ssa_edges_worklist (tree, value);
static void add_outgoing_control_edges (basic_block);
static void add_control_edge (edge);
static void def_to_varying (tree);
static void set_lattice_value (tree, value);
static void simulate_block (basic_block);
static void simulate_stmt (tree);
static void substitute_and_fold (void);
static value evaluate_stmt (tree);
static void dump_lattice_value (FILE *, const char *, value);
static bool replace_uses_in (tree, bool *);
static bool replace_vuse_in (tree, bool *);
static latticevalue likely_value (tree);
static tree get_rhs (tree);
static bool set_rhs (tree *, tree);
static value *get_value (tree);
static value get_default_value (tree);
static tree ccp_fold_builtin (tree, tree);
static bool get_strlen (tree, tree *, bitmap);
static inline bool cfg_blocks_empty_p (void);
static void cfg_blocks_add (basic_block);
static basic_block cfg_blocks_get (void);
static bool need_imm_uses_for (tree var);

/* Process an SSA edge worklist.  WORKLIST is the SSA edge worklist to
   drain.  This pops statements off the given WORKLIST and processes
   them until there are no more statements on WORKLIST.  */

static void
process_ssa_edge_worklist (varray_type *worklist)
{
  /* Drain the entire worklist.  */
  while (VARRAY_ACTIVE_SIZE (*worklist) > 0)
    {
      /* Pull the statement to simulate off the worklist.  */
      tree stmt = VARRAY_TOP_TREE (*worklist);
      stmt_ann_t ann = stmt_ann (stmt);
      VARRAY_POP (*worklist);

      /* visit_stmt can "cancel" reevaluation of some statements.
         If it does, then in_ccp_worklist will be zero.  */
      if (ann->in_ccp_worklist)
        {
          ann->in_ccp_worklist = 0;
          simulate_stmt (stmt);
        }
    }
}

/* Main entry point for SSA Conditional Constant Propagation.  FNDECL is
   the declaration for the function to optimize.

   On exit, VARS_TO_RENAME will contain the symbols that have been exposed by
   the propagation of ADDR_EXPR expressions into pointer dereferences and need
   to be renamed into SSA.

   PHASE indicates which dump file from the DUMP_FILES array to use when
   dumping debugging information.  */

static void
tree_ssa_ccp (void)
{
  initialize ();

  /* Iterate until the worklists are empty.  */
  while (!cfg_blocks_empty_p ()
         || VARRAY_ACTIVE_SIZE (ssa_edges) > 0
         || VARRAY_ACTIVE_SIZE (varying_ssa_edges) > 0)
    {
      if (!cfg_blocks_empty_p ())
        {
          /* Pull the next block to simulate off the worklist.  */
          basic_block dest_block = cfg_blocks_get ();
          simulate_block (dest_block);
        }

      /* In order to move things to varying as quickly as
         possible, process the VARYING_SSA_EDGES worklist first.  */
      process_ssa_edge_worklist (&varying_ssa_edges);

      /* Now process the SSA_EDGES worklist.  */
      process_ssa_edge_worklist (&ssa_edges);
    }

  /* Now perform substitutions based on the known constant values.  */
  substitute_and_fold ();

  /* Now cleanup any unreachable code.  */
  cleanup_tree_cfg ();

  /* Free allocated memory.  */
  finalize ();

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_referenced_vars (dump_file);
      fprintf (dump_file, "\n\n");
    }
}

static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}

struct tree_opt_pass pass_ccp =
{
  "ccp",                                /* name */
  gate_ccp,                             /* gate */
  tree_ssa_ccp,                         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CCP,                          /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_rename_vars
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts                 /* todo_flags_finish */
};

/* Get the constant value associated with variable VAR.  */

static value *
get_value (tree var)
{
  value *val;

#if defined ENABLE_CHECKING
  if (TREE_CODE (var) != SSA_NAME)
    abort ();
#endif

  val = &value_vector[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}

/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

static void
simulate_block (basic_block block)
{
  tree phi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  */
  for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
    visit_phi_node (phi);

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (!TEST_BIT (executable_blocks, block->index))
    {
      block_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;

      /* Note that we have simulated this block.  */
      SET_BIT (executable_blocks, block->index);

      for (j = bsi_start (block); !bsi_end_p (j); bsi_next (&j))
        visit_stmt (bsi_stmt (j));

      /* We cannot predict when abnormal edges will be executed, so
         once a block is considered executable, we consider any
         outgoing abnormal edges as executable.

         At the same time, if this block has only one successor that is
         reached by non-abnormal edges, then add that successor to the
         worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      for (e = block->succ; e; e = e->succ_next)
        {
          if (e->flags & EDGE_ABNORMAL)
            add_control_edge (e);
          else
            {
              normal_edge_count++;
              normal_edge = e;
            }
        }

      if (normal_edge_count == 1)
        add_control_edge (normal_edge);
    }
}

/* Follow the def-use edges for statement DEF_STMT and simulate all the
   statements reached by it.  */

static void
simulate_stmt (tree use_stmt)
{
  basic_block use_bb = bb_for_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
      print_generic_stmt (dump_file, use_stmt, dump_flags);
    }

  if (TREE_CODE (use_stmt) == PHI_NODE)
    {
      /* PHI nodes are always visited, regardless of whether or not the
         destination block is executable.  */
      visit_phi_node (use_stmt);
    }
  else if (TEST_BIT (executable_blocks, use_bb->index))
    {
      /* Otherwise, visit the statement containing the use reached by
         DEF, only if the destination block is marked executable.  */
      visit_stmt (use_stmt);
    }
}

/* Perform final substitution and folding.  After this pass the program
   should still be in SSA form.  */

static void
substitute_and_fold (void)
{
  basic_block bb;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file,
             "\nSubstituting constants and folding statements\n\n");

  /* Substitute constants in every statement of every basic block.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree phi;

      /* Propagate our known constants into PHI nodes.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          int i;

          for (i = 0; i < PHI_NUM_ARGS (phi); i++)
            {
              value *new_val;
              use_operand_p orig_p = PHI_ARG_DEF_PTR (phi, i);
              tree orig = USE_FROM_PTR (orig_p);

              if (! SSA_VAR_P (orig))
                break;

              new_val = get_value (orig);
              if (new_val->lattice_val == CONSTANT
                  && may_propagate_copy (orig, new_val->const_val))
                SET_USE (orig_p, new_val->const_val);
            }
        }

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          bool replaced_address;
          tree stmt = bsi_stmt (i);

          /* Skip statements that have been folded already.  */
          if (stmt_modified_p (stmt) || !is_exec_stmt (stmt))
            continue;

          /* Replace the statement with its folded version and mark it
             folded.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Line %d: replaced ", get_lineno (stmt));
              print_generic_stmt (dump_file, stmt, TDF_SLIM);
            }

          if (replace_uses_in (stmt, &replaced_address)
              || replace_vuse_in (stmt, &replaced_address))
            {
              bool changed = fold_stmt (bsi_stmt_ptr (i));

              /* If we folded a builtin function, we'll likely
                 need to rename VDEFs.  */
              if (replaced_address || changed)
                {
                  mark_new_vars_to_rename (stmt, vars_to_rename);
                  if (maybe_clean_eh_stmt (stmt))
                    tree_purge_dead_eh_edges (bb);
                }
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " with ");
              print_generic_stmt (dump_file, stmt, TDF_SLIM);
              fprintf (dump_file, "\n");
            }
        }
    }
}

/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling cp_lattice_meet() with all the
   arguments of the PHI node that are incoming via executable edges.  */

static void
visit_phi_node (tree phi)
{
  bool short_circuit = 0;
  value phi_val, *curr_val;
  int i;

  /* If the PHI node has already been deemed to be VARYING, don't simulate
     it again.  */
  if (DONT_SIMULATE_AGAIN (phi))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_generic_expr (dump_file, phi, dump_flags);
    }

  curr_val = get_value (PHI_RESULT (phi));
  switch (curr_val->lattice_val)
    {
    case VARYING:
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "\n   Shortcircuit. Default of VARYING.");
      short_circuit = 1;
      break;

    case CONSTANT:
      phi_val = *curr_val;
      break;

    case UNKNOWN_VAL:
      /* To avoid the default value of UNKNOWN_VAL overriding
         that of its possible constant arguments, temporarily
         set the PHI node's default lattice value to be
         UNDEFINED.  At the same time, place something other
         than NULL_TREE in phi_val.const_val as a flag to
         check when setting a new state for this PHI node to
         ensure that we avoid incorrect state transitions from
         UNKNOWN_VAL to UNDEFINED.  */
      phi_val.lattice_val = UNDEFINED;
      phi_val.const_val = phi;
      break;

    case UNDEFINED:
    case UNINITIALIZED:
      phi_val.lattice_val = UNDEFINED;
      phi_val.const_val = NULL_TREE;
      break;

    default:
      abort ();
    }

  /* If the variable is volatile or the variable is never referenced in a
     real operand, then consider the PHI node VARYING.  */
  if (short_circuit || TREE_THIS_VOLATILE (SSA_NAME_VAR (PHI_RESULT (phi))))
    {
      phi_val.lattice_val = VARYING;
      phi_val.const_val = NULL;
    }
  else
    for (i = 0; i < PHI_NUM_ARGS (phi); i++)
      {
        /* Compute the meet operator over all the PHI arguments.  */
        edge e = PHI_ARG_EDGE (phi, i);

        if (dump_file && (dump_flags & TDF_DETAILS))
          {
            fprintf (dump_file,
                     "\n    Argument #%d (%d -> %d %sexecutable)\n",
                     i, e->src->index, e->dest->index,
                     (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
          }

        /* If the incoming edge is executable, compute the meet operator for
           the existing value of the PHI node and the current PHI argument.  */
        if (e->flags & EDGE_EXECUTABLE)
          {
            tree rdef = PHI_ARG_DEF (phi, i);
            value *rdef_val, val;

            if (is_gimple_min_invariant (rdef))
              {
                val.lattice_val = CONSTANT;
                val.const_val = rdef;
                rdef_val = &val;
              }
            else
              rdef_val = get_value (rdef);

            phi_val = cp_lattice_meet (phi_val, *rdef_val);

            if (dump_file && (dump_flags & TDF_DETAILS))
              {
                fprintf (dump_file, "\t");
                print_generic_expr (dump_file, rdef, dump_flags);
                dump_lattice_value (dump_file, "\tValue: ", *rdef_val);
                fprintf (dump_file, "\n");
              }

            if (phi_val.lattice_val == VARYING)
              break;
          }
      }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", phi_val);
      fprintf (dump_file, "\n\n");
    }

  /* Check for an invalid change from UNKNOWN_VAL to UNDEFINED.  */
  if (phi_val.lattice_val != UNDEFINED || phi_val.const_val == NULL_TREE)
    {
      set_lattice_value (PHI_RESULT (phi), phi_val);
      if (phi_val.lattice_val == VARYING)
        DONT_SIMULATE_AGAIN (phi) = 1;
    }
}

/* Compute the meet operator between VAL1 and VAL2:

     any M UNDEFINED   = any
     any M VARYING     = VARYING
     any M UNKNOWN_VAL = UNKNOWN_VAL
     Ci  M Cj          = Ci       if (i == j)
     Ci  M Cj          = VARYING  if (i != j)  */

static value
cp_lattice_meet (value val1, value val2)
{
  value result;

  /* any M UNDEFINED = any.  */
  if (val1.lattice_val == UNDEFINED)
    return val2;
  else if (val2.lattice_val == UNDEFINED)
    return val1;

  /* any M VARYING = VARYING.  */
  if (val1.lattice_val == VARYING || val2.lattice_val == VARYING)
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
      return result;
    }

  /* any M UNKNOWN_VAL = UNKNOWN_VAL.  */
  if (val1.lattice_val == UNKNOWN_VAL
      || val2.lattice_val == UNKNOWN_VAL)
    {
      result.lattice_val = UNKNOWN_VAL;
      result.const_val = NULL_TREE;
      return result;
    }

  /* Ci M Cj = Ci       if (i == j)
     Ci M Cj = VARYING  if (i != j)  */
  if (simple_cst_equal (val1.const_val, val2.const_val) == 1)
    {
      result.lattice_val = CONSTANT;
      result.const_val = val1.const_val;
    }
  else
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
    }

  return result;
}
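
/* Worked example (illustrative only, hypothetical SSA names): for a PHI
   node x_3 = PHI <x_1, x_2>, if x_1 is CONSTANT 4 and x_2 is UNDEFINED,
   the meet is CONSTANT 4; if x_2 later becomes CONSTANT 4 the result
   stays CONSTANT 4, but if x_2 becomes CONSTANT 5 (or VARYING) the meet
   drops to VARYING.  */
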
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, do the following:

   - If the statement is an assignment, add all the SSA edges starting at
     this definition.

   - If the statement is a conditional branch:
       . If the statement evaluates to non-constant, add all edges to
         worklist.
       . If the statement is constant, add the edge executed as the
         result of the branch.  */

static void
visit_stmt (tree stmt)
{
  size_t i;
  stmt_ann_t ann;
  def_optype defs;
  v_may_def_optype v_may_defs;
  v_must_def_optype v_must_defs;

  /* If the statement has already been deemed to be VARYING, don't simulate
     it again.  */
  if (DONT_SIMULATE_AGAIN (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement: ");
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  ann = stmt_ann (stmt);

  /* If this statement is already in the worklist then "cancel" it.  The
     reevaluation implied by the worklist entry will produce the same
     value we generate here and thus reevaluating it again from the
     worklist is pointless.  */
  if (ann->in_ccp_worklist)
    ann->in_ccp_worklist = 0;

  /* Now examine the statement.  If the statement is an assignment that
     produces a single output value, evaluate its RHS to see if the lattice
     value of its output has changed.  */
  v_must_defs = V_MUST_DEF_OPS (ann);
  v_may_defs = V_MAY_DEF_OPS (ann);
  if (TREE_CODE (stmt) == MODIFY_EXPR
      && NUM_V_MAY_DEFS (v_may_defs) == 0
      && (NUM_V_MUST_DEFS (v_must_defs) == 1
          || TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME))
    visit_assignment (stmt);

  /* Definitions made by statements other than assignments to SSA_NAMEs
     represent unknown modifications to their outputs.  Mark them VARYING.  */
  else if (NUM_DEFS (defs = DEF_OPS (ann)) != 0)
    {
      DONT_SIMULATE_AGAIN (stmt) = 1;
      for (i = 0; i < NUM_DEFS (defs); i++)
        {
          tree def = DEF_OP (defs, i);
          def_to_varying (def);
        }
    }

  /* If STMT is a conditional branch, see if we can determine which branch
     will be taken.  */
  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
    visit_cond_stmt (stmt);

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  else
    {
      DONT_SIMULATE_AGAIN (stmt) = 1;

      /* If STMT is a computed goto, then mark all the output edges
         executable.  */
      if (computed_goto_p (stmt))
        add_outgoing_control_edges (bb_for_stmt (stmt));
    }

  /* Mark all V_MAY_DEF operands VARYING.  */
  v_may_defs = V_MAY_DEF_OPS (ann);
  for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
    def_to_varying (V_MAY_DEF_RESULT (v_may_defs, i));
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS.  */

static void
visit_assignment (tree stmt)
{
  value val;
  tree lhs, rhs;
  vuse_optype vuses;
  v_must_def_optype v_must_defs;

  lhs = TREE_OPERAND (stmt, 0);
  rhs = TREE_OPERAND (stmt, 1);
  vuses = STMT_VUSE_OPS (stmt);
  v_must_defs = STMT_V_MUST_DEF_OPS (stmt);

#if defined ENABLE_CHECKING
  if (NUM_V_MAY_DEFS (STMT_V_MAY_DEF_OPS (stmt)) > 0
      || (NUM_V_MUST_DEFS (v_must_defs) != 1
          && TREE_CODE (lhs) != SSA_NAME))
    abort ();
#endif

  /* We require the SSA version number of the lhs for the value_vector.
     Make sure we have it.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      /* If we make it here, then stmt only has one definition:
         a V_MUST_DEF.  */
      lhs = V_MUST_DEF_OP (v_must_defs, 0);
    }

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* For a simple copy operation, we copy the lattice values.  */
      value *nval = get_value (rhs);
      val = *nval;
    }
  else if (DECL_P (rhs)
           && NUM_VUSES (vuses) == 1
           && rhs == SSA_NAME_VAR (VUSE_OP (vuses, 0)))
    {
      /* Same as above, but the rhs is not a gimple register and yet
         has a known VUSE.  */
      value *nval = get_value (VUSE_OP (vuses, 0));
      val = *nval;
    }
  else
    {
      /* Evaluate the statement.  */
      val = evaluate_stmt (stmt);
    }

  /* FIXME: Hack.  If this was a definition of a bitfield, we need to widen
     the constant value into the type of the destination variable.  This
     should not be necessary if GCC represented bitfields properly.  */
  {
    tree lhs = TREE_OPERAND (stmt, 0);
    if (val.lattice_val == CONSTANT
        && TREE_CODE (lhs) == COMPONENT_REF
        && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
      {
        tree w = widen_bitfield (val.const_val, TREE_OPERAND (lhs, 1), lhs);

        if (w && is_gimple_min_invariant (w))
          val.const_val = w;
        else
          {
            val.lattice_val = VARYING;
            val.const_val = NULL;
          }
      }
  }

  /* If LHS is not a gimple register, then it cannot take on an
     UNDEFINED value.  */
  if (!is_gimple_reg (SSA_NAME_VAR (lhs))
      && val.lattice_val == UNDEFINED)
    val.lattice_val = UNKNOWN_VAL;

  /* Set the lattice value of the statement's output.  */
  set_lattice_value (lhs, val);
  if (val.lattice_val == VARYING)
    DONT_SIMULATE_AGAIN (stmt) = 1;
}

/* Visit the conditional statement STMT.  If it evaluates to a constant value,
   mark outgoing edges appropriately.  */

static void
visit_cond_stmt (tree stmt)
{
  edge e;
  value val;
  basic_block block;

  block = bb_for_stmt (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically, add
     all outgoing edges from BLOCK.  */
  e = find_taken_edge (block, val.const_val);
  if (e)
    add_control_edge (e);
  else
    {
      DONT_SIMULATE_AGAIN (stmt) = 1;
      add_outgoing_control_edges (block);
    }
}
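
/* Illustration (not part of the pass, hypothetical names): given the
   statement if (x_1 > 2) with x_1 known to be CONSTANT 5, the condition
   folds to true, so only the true edge is added to the CFG worklist;
   the false arm is never simulated and is later removed as unreachable
   code.  */
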
/* Add all the edges coming out of BB to the control flow worklist.  */

static void
add_outgoing_control_edges (basic_block bb)
{
  edge e;

  for (e = bb->succ; e; e = e->succ_next)
    add_control_edge (e);
}

/* Add edge E to the control flow worklist.  */

static void
add_control_edge (edge e)
{
  basic_block bb = e->dest;
  if (bb == EXIT_BLOCK_PTR)
    return;

  /* If the edge had already been executed, skip it.  */
  if (e->flags & EDGE_EXECUTABLE)
    return;

  e->flags |= EDGE_EXECUTABLE;

  /* If the block is already in the list, we're done.  */
  if (TEST_BIT (bb_in_list, bb->index))
    return;

  cfg_blocks_add (bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Adding Destination of edge (%d -> %d) to worklist\n\n",
             e->src->index, e->dest->index);
}

/* CCP specific front-end to the non-destructive constant folding routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS.  */

static tree
ccp_fold (tree stmt)
{
  tree rhs = get_rhs (stmt);
  enum tree_code code = TREE_CODE (rhs);
  int kind = TREE_CODE_CLASS (code);
  tree retval = NULL_TREE;
  vuse_optype vuses;

  vuses = STMT_VUSE_OPS (stmt);

  /* If the RHS is just a variable, then that variable must now have
     a constant value that we can return directly.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    return get_value (rhs)->const_val;
  else if (DECL_P (rhs)
           && NUM_VUSES (vuses) == 1
           && rhs == SSA_NAME_VAR (VUSE_OP (vuses, 0)))
    return get_value (VUSE_OP (vuses, 0))->const_val;

  /* Unary operators.  Note that we know the single operand must
     be a constant.  So this should almost always return a
     simplified RHS.  */
  if (kind == '1')
    {
      /* Handle unary operators which can appear in GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);

      /* Simplify the operand down to a constant.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          value *val = get_value (op0);
          if (val->lattice_val == CONSTANT)
            op0 = get_value (op0)->const_val;
        }

      retval = nondestructive_fold_unary_to_constant (code,
                                                      TREE_TYPE (rhs),
                                                      op0);

      /* If we folded, but did not create an invariant, then we cannot
         use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
        return NULL;

      /* If we could not fold the expression, but the arguments are all
         constants and gimple values, then build and return the new
         expression.

         In some cases the new expression is still something we can
         use as a replacement for an argument.  This happens with
         NOP conversions of types for example.

         In other cases the new expression cannot be used as a
         replacement for an argument (as it would create non-gimple
         code).  But the new expression can still be used to derive
         other constants.  */
      if (! retval && is_gimple_min_invariant (op0))
        return build1 (code, TREE_TYPE (rhs), op0);
    }

  /* Binary and comparison operators.  We know one or both of the
     operands are constants.  */
  else if (kind == '2'
           || kind == '<'
           || code == TRUTH_AND_EXPR
           || code == TRUTH_OR_EXPR
           || code == TRUTH_XOR_EXPR)
    {
      /* Handle binary and comparison operators that can appear in
         GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);
      tree op1 = TREE_OPERAND (rhs, 1);

      /* Simplify the operands down to constants when appropriate.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          value *val = get_value (op0);
          if (val->lattice_val == CONSTANT)
            op0 = val->const_val;
        }

      if (TREE_CODE (op1) == SSA_NAME)
        {
          value *val = get_value (op1);
          if (val->lattice_val == CONSTANT)
            op1 = val->const_val;
        }

      retval = nondestructive_fold_binary_to_constant (code,
                                                       TREE_TYPE (rhs),
                                                       op0, op1);

      /* If we folded, but did not create an invariant, then we cannot
         use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
        return NULL;

      /* If we could not fold the expression, but the arguments are all
         constants and gimple values, then build and return the new
         expression.

         In some cases the new expression is still something we can
         use as a replacement for an argument.  This happens with
         NOP conversions of types for example.

         In other cases the new expression cannot be used as a
         replacement for an argument (as it would create non-gimple
         code).  But the new expression can still be used to derive
         other constants.  */
      if (! retval
          && is_gimple_min_invariant (op0)
          && is_gimple_min_invariant (op1))
        return build (code, TREE_TYPE (rhs), op0, op1);
    }

  /* We may be able to fold away calls to builtin functions if their
     arguments are constants.  */
  else if (code == CALL_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
           && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
               == FUNCTION_DECL)
           && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
    {
      use_optype uses = STMT_USE_OPS (stmt);
      if (NUM_USES (uses) != 0)
        {
          tree *orig;
          size_t i;

          /* Preserve the original values of every operand.  */
          orig = xmalloc (sizeof (tree) * NUM_USES (uses));
          for (i = 0; i < NUM_USES (uses); i++)
            orig[i] = USE_OP (uses, i);

          /* Substitute operands with their values and try to fold.  */
          replace_uses_in (stmt, NULL);
          retval = fold_builtin (rhs, false);

          /* Restore operands to their original form.  */
          for (i = 0; i < NUM_USES (uses); i++)
            SET_USE_OP (uses, i, orig[i]);
          free (orig);
        }
    }

  /* If we got a simplified form, see if we need to convert its type.  */
  if (retval)
    return fold_convert (TREE_TYPE (rhs), retval);

  /* No simplification was possible.  */
  return rhs;
}
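
/* Illustration (not part of the pass, hypothetical SSA names): for a
   statement x_2 = y_1 + 3 where y_1 has lattice value CONSTANT 4, the
   binary-operator case substitutes the constant and asks the
   nondestructive folder for 4 + 3, yielding 7, which evaluate_stmt then
   records as the CONSTANT value of x_2.  */
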
/* Evaluate statement STMT.  */

static value
evaluate_stmt (tree stmt)
{
  value val;
  tree simplified;
  latticevalue likelyvalue = likely_value (stmt);

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    simplified = get_rhs (stmt);
  /* Otherwise the statement is likely to have an UNDEFINED value and
     there will be nothing to do.  */
  else
    simplified = NULL_TREE;

  if (simplified && is_gimple_min_invariant (simplified))
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.const_val = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
         had undefined or virtual operands, then the result of the
         statement should be undefined or virtual respectively.
         Else the result of the statement is VARYING.  */
      val.lattice_val = (likelyvalue == UNDEFINED ? UNDEFINED : VARYING);
      val.lattice_val = (likelyvalue == UNKNOWN_VAL
                         ? UNKNOWN_VAL : val.lattice_val);
      val.const_val = NULL_TREE;
    }

  return val;
}

/* Debugging dumps.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, value val)
{
  switch (val.lattice_val)
    {
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case UNKNOWN_VAL:
      fprintf (outf, "%sUNKNOWN_VAL", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.const_val, dump_flags);
      break;
    default:
      abort ();
    }
}

/* Given a constant value VAL for bitfield FIELD, and a destination
   variable VAR, return VAL appropriately widened to fit into VAR.  If
   FIELD is wider than HOST_WIDE_INT, NULL is returned.  */

tree
widen_bitfield (tree val, tree field, tree var)
{
  unsigned HOST_WIDE_INT var_size, field_size;
  tree wide_val;
  unsigned HOST_WIDE_INT mask;
  unsigned int i;

  /* We can only do this if the size of the type and field and VAL are
     all constants representable in HOST_WIDE_INT.  */
  if (!host_integerp (TYPE_SIZE (TREE_TYPE (var)), 1)
      || !host_integerp (DECL_SIZE (field), 1)
      || !host_integerp (val, 0))
    return NULL_TREE;

  var_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1);
  field_size = tree_low_cst (DECL_SIZE (field), 1);

  /* Give up if either the bitfield or the variable are too wide.  */
  if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

#if defined ENABLE_CHECKING
  if (var_size < field_size)
    abort ();
#endif

  /* If the sign bit of the value is not set or the field's type is unsigned,
     just mask off the high order bits of the value.  */
  if (DECL_UNSIGNED (field)
      || !(tree_low_cst (val, 0) & (((HOST_WIDE_INT)1) << (field_size - 1))))
    {
      /* Zero extension.  Build a mask with the lower 'field_size' bits
         set and a BIT_AND_EXPR node to clear the high order bits of
         the value.  */
      for (i = 0, mask = 0; i < field_size; i++)
        mask |= ((HOST_WIDE_INT) 1) << i;

      wide_val = build (BIT_AND_EXPR, TREE_TYPE (var), val,
                        fold_convert (TREE_TYPE (var),
                                      build_int_cst (NULL_TREE, mask, 0)));
    }
  else
    {
      /* Sign extension.  Create a mask with the upper 'field_size'
         bits set and a BIT_IOR_EXPR to set the high order bits of the
         value.  */
      for (i = 0, mask = 0; i < (var_size - field_size); i++)
        mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);

      wide_val = build (BIT_IOR_EXPR, TREE_TYPE (var), val,
                        fold_convert (TREE_TYPE (var),
                                      build_int_cst (NULL_TREE, mask, 0)));
    }

  return fold (wide_val);
}
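
/* Worked example (illustrative only): widening the constant 0xA stored
   in a 4-bit signed bitfield into a 32-bit variable: bit 3 (the field's
   sign bit) is set, so the sign-extension arm builds the mask
   0xFFFFFFF0 and ORs it in, producing -6.  Had the field been unsigned,
   the zero-extension arm would AND with 0xF instead, producing 10.  */
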
/* Function indicating whether we ought to include information for 'var'
   when calculating immediate uses.  */

static bool
need_imm_uses_for (tree var)
{
  return get_value (var)->lattice_val != VARYING;
}

/* Initialize local data structures and worklists for CCP.  */

static void
initialize (void)
{
  edge e;
  basic_block bb;
  sbitmap virtual_var;

  /* Worklists of SSA edges.  */
  VARRAY_TREE_INIT (ssa_edges, 20, "ssa_edges");
  VARRAY_TREE_INIT (varying_ssa_edges, 20, "varying_ssa_edges");

  executable_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (executable_blocks);

  bb_in_list = sbitmap_alloc (last_basic_block);
  sbitmap_zero (bb_in_list);

  value_vector = (value *) xmalloc (num_ssa_names * sizeof (value));
  memset (value_vector, 0, num_ssa_names * sizeof (value));

  /* 1 if ssa variable is used in a virtual variable context.  */
  virtual_var = sbitmap_alloc (num_ssa_names);
  sbitmap_zero (virtual_var);

  /* Initialize default values and simulation flags for PHI nodes, statements
     and edges.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree stmt;
      stmt_ann_t ann;
      def_optype defs;
      v_may_def_optype v_may_defs;
      v_must_def_optype v_must_defs;
      size_t x;
      int vary;

      /* Get the default value for each definition.  */
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          vary = 0;
          stmt = bsi_stmt (i);
          get_stmt_operands (stmt);
          ann = stmt_ann (stmt);
          defs = DEF_OPS (ann);
          for (x = 0; x < NUM_DEFS (defs); x++)
            {
              tree def = DEF_OP (defs, x);
              if (get_value (def)->lattice_val == VARYING)
                vary = 1;
            }

          /* Get the default value for each V_MUST_DEF.  */
          v_must_defs = V_MUST_DEF_OPS (ann);
          for (x = 0; x < NUM_V_MUST_DEFS (v_must_defs); x++)
            {
              tree v_must_def = V_MUST_DEF_OP (v_must_defs, x);
              if (get_value (v_must_def)->lattice_val == VARYING)
                vary = 1;
            }

          DONT_SIMULATE_AGAIN (stmt) = vary;

          /* Mark all V_MAY_DEF operands VARYING.  */
          v_may_defs = V_MAY_DEF_OPS (ann);
          for (x = 0; x < NUM_V_MAY_DEFS (v_may_defs); x++)
            {
              tree res = V_MAY_DEF_RESULT (v_may_defs, x);
              get_value (res)->lattice_val = VARYING;
              SET_BIT (virtual_var, SSA_NAME_VERSION (res));
            }
        }

      for (e = bb->succ; e; e = e->succ_next)
        e->flags &= ~EDGE_EXECUTABLE;
    }

  /* Now process PHI nodes.  */
  FOR_EACH_BB (bb)
    {
      tree phi, var;
      int x;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          value *val = get_value (PHI_RESULT (phi));

          if (val->lattice_val != VARYING)
            {
              for (x = 0; x < PHI_NUM_ARGS (phi); x++)
                {
                  var = PHI_ARG_DEF (phi, x);

                  /* If one argument has a V_MAY_DEF,
                     the result is varying.  */
                  if (TREE_CODE (var) == SSA_NAME)
                    {
                      if (TEST_BIT (virtual_var, SSA_NAME_VERSION (var)))
                        {
                          val->lattice_val = VARYING;
                          SET_BIT (virtual_var,
                                   SSA_NAME_VERSION (PHI_RESULT (phi)));
                          break;
                        }
                    }
                }
            }
          DONT_SIMULATE_AGAIN (phi) = ((val->lattice_val == VARYING) ? 1 : 0);
        }
    }

  sbitmap_free (virtual_var);

  /* Compute immediate uses for variables we care about.  */
  compute_immediate_uses (TDFA_USE_OPS | TDFA_USE_VOPS, need_imm_uses_for);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_immediate_uses (dump_file);

  VARRAY_BB_INIT (cfg_blocks, 20, "cfg_blocks");

  /* Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      if (e->dest != EXIT_BLOCK_PTR)
        {
          e->flags |= EDGE_EXECUTABLE;
          cfg_blocks_add (e->dest);
        }
    }
}

/* Free allocated storage.  */

static void
finalize (void)
{
  ssa_edges = NULL;
  varying_ssa_edges = NULL;
  free (value_vector);
  sbitmap_free (bb_in_list);
  sbitmap_free (executable_blocks);
}

/* Is the block worklist empty?  */

static inline bool
cfg_blocks_empty_p (void)
{
  return (cfg_blocks_num == 0);
}

/* Add a basic block to the worklist.  */

static void
cfg_blocks_add (basic_block bb)
{
  if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
    return;

  if (TEST_BIT (bb_in_list, bb->index))
    return;

  if (cfg_blocks_empty_p ())
    {
      cfg_blocks_tail = cfg_blocks_head = 0;
      cfg_blocks_num = 1;
    }
  else
    {
      cfg_blocks_num++;
      if (cfg_blocks_num > VARRAY_SIZE (cfg_blocks))
        {
          /* We have to grow the array now.  Adjust the queue to occupy the
             full space of the original array.  */
          cfg_blocks_tail = VARRAY_SIZE (cfg_blocks);
          cfg_blocks_head = 0;
          VARRAY_GROW (cfg_blocks, 2 * VARRAY_SIZE (cfg_blocks));
        }
      else
        cfg_blocks_tail = (cfg_blocks_tail + 1) % VARRAY_SIZE (cfg_blocks);
    }

  VARRAY_BB (cfg_blocks, cfg_blocks_tail) = bb;
  SET_BIT (bb_in_list, bb->index);
}
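
/* Illustration (not part of the pass): CFG_BLOCKS is used as a circular
   buffer.  With VARRAY_SIZE == 4, head == 3 and tail == 3, adding one
   more block wraps the tail to (3 + 1) % 4 == 0; only when the block
   count exceeds the array size is the varray grown, at which point the
   queue is re-laid-out to occupy the full original space.  */
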
/* Remove a block from the worklist.  */

static basic_block
cfg_blocks_get (void)
{
  basic_block bb;

  bb = VARRAY_BB (cfg_blocks, cfg_blocks_head);

#ifdef ENABLE_CHECKING
  if (cfg_blocks_empty_p () || !bb)
    abort ();
#endif

  cfg_blocks_num--;
  cfg_blocks_head = (cfg_blocks_head + 1) % VARRAY_SIZE (cfg_blocks);
  RESET_BIT (bb_in_list, bb->index);

  return bb;
}

/* We have just defined a new value for VAR.  Add all immediate uses
   of VAR to the ssa_edges or varying_ssa_edges worklist.  */

static void
add_var_to_ssa_edges_worklist (tree var, value val)
{
  tree stmt = SSA_NAME_DEF_STMT (var);
  dataflow_t df = get_immediate_uses (stmt);
  int num_uses = num_immediate_uses (df);
  int i;

  for (i = 0; i < num_uses; i++)
    {
      tree use = immediate_use (df, i);

      if (!DONT_SIMULATE_AGAIN (use))
        {
          stmt_ann_t ann = stmt_ann (use);
          if (ann->in_ccp_worklist == 0)
            {
              ann->in_ccp_worklist = 1;
              if (val.lattice_val == VARYING)
                VARRAY_PUSH_TREE (varying_ssa_edges, use);
              else
                VARRAY_PUSH_TREE (ssa_edges, use);
            }
        }
    }
}

/* Set the lattice value for the variable VAR to VARYING.  */

static void
def_to_varying (tree var)
{
  value val;
  val.lattice_val = VARYING;
  val.const_val = NULL_TREE;
  set_lattice_value (var, val);
}

/* Set the lattice value for variable VAR to VAL.  */

static void
set_lattice_value (tree var, value val)
{
  value *old = get_value (var);

#ifdef ENABLE_CHECKING
  if (val.lattice_val == UNDEFINED)
    {
      /* CONSTANT->UNDEFINED is never a valid state transition.  */
      if (old->lattice_val == CONSTANT)
        abort ();

      /* UNKNOWN_VAL->UNDEFINED is never a valid state transition.  */
      if (old->lattice_val == UNKNOWN_VAL)
        abort ();

      /* VARYING->UNDEFINED is generally not a valid state transition,
         except for values which are initialized to VARYING.  */
      if (old->lattice_val == VARYING
          && get_default_value (var).lattice_val != VARYING)
        abort ();
    }
  else if (val.lattice_val == CONSTANT)
    {
      /* VARYING -> CONSTANT is an invalid state transition, except
         for objects which start off in a VARYING state.  */
      if (old->lattice_val == VARYING
          && get_default_value (var).lattice_val != VARYING)
        abort ();
    }
#endif

  /* If the constant for VAR has changed, then this VAR is really varying.  */
  if (old->lattice_val == CONSTANT && val.lattice_val == CONSTANT
      && !simple_cst_equal (old->const_val, val.const_val))
    {
      val.lattice_val = VARYING;
      val.const_val = NULL_TREE;
    }

  if (old->lattice_val != val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file,
                              "Lattice value changed to ", val);
          fprintf (dump_file, ".  Adding definition to SSA edges.\n");
        }

      add_var_to_ssa_edges_worklist (var, val);
      *old = val;
    }
}
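
/* Illustration (not part of the pass, hypothetical SSA name): if x_5
   was recorded as CONSTANT 3 and a later evaluation produces CONSTANT
   7, the values disagree, so x_5 is demoted to VARYING rather than
   oscillating between constants; this monotonic descent of the lattice
   is what guarantees that the simulation terminates.  */
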
/* Replace USE references in statement STMT with their immediate reaching
   definition.  Return true if at least one reference was replaced.  If
   REPLACED_ADDRESSES_P is given, it will be set to true if an address
   constant was replaced.  */

static bool
replace_uses_in (tree stmt, bool *replaced_addresses_p)
{
  bool replaced = false;
  use_optype uses;
  size_t i;

  if (replaced_addresses_p)
    *replaced_addresses_p = false;

  get_stmt_operands (stmt);

  uses = STMT_USE_OPS (stmt);
  for (i = 0; i < NUM_USES (uses); i++)
    {
      use_operand_p use = USE_OP_PTR (uses, i);
      value *val = get_value (USE_FROM_PTR (use));

      if (val->lattice_val == CONSTANT)
        {
          SET_USE (use, val->const_val);
          replaced = true;
          if (POINTER_TYPE_P (TREE_TYPE (USE_FROM_PTR (use)))
              && replaced_addresses_p)
            *replaced_addresses_p = true;
        }
    }

  return replaced;
}

/* Replace the VUSE references in statement STMT with its immediate reaching
   definition.  Return true if the reference was replaced.  If
   REPLACED_ADDRESSES_P is given, it will be set to true if an address
   constant was replaced.  */

static bool
replace_vuse_in (tree stmt, bool *replaced_addresses_p)
{
  bool replaced = false;
  vuse_optype vuses;
  use_operand_p vuse;
  value *val;

  if (replaced_addresses_p)
    *replaced_addresses_p = false;

  get_stmt_operands (stmt);

  vuses = STMT_VUSE_OPS (stmt);

  if (NUM_VUSES (vuses) != 1)
    return false;

  vuse = VUSE_OP_PTR (vuses, 0);
  val = get_value (USE_FROM_PTR (vuse));

  if (val->lattice_val == CONSTANT
      && TREE_CODE (stmt) == MODIFY_EXPR
      && DECL_P (TREE_OPERAND (stmt, 1))
      && TREE_OPERAND (stmt, 1) == SSA_NAME_VAR (USE_FROM_PTR (vuse)))
    {
      TREE_OPERAND (stmt, 1) = val->const_val;
      replaced = true;
      if (POINTER_TYPE_P (TREE_TYPE (USE_FROM_PTR (vuse)))
          && replaced_addresses_p)
        *replaced_addresses_p = true;
    }

  return replaced;
}

/* Return the likely latticevalue for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if any operands of STMT are undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static latticevalue
likely_value (tree stmt)
{
  use_optype uses;
  vuse_optype vuses;
  size_t i;
  int found_constant = 0;
  stmt_ann_t ann;

  /* If the statement makes aliased loads or has volatile operands, it
     won't fold to a constant value.  */
  ann = stmt_ann (stmt);
  if (ann->makes_aliased_loads || ann->has_volatile_ops)
    return VARYING;

  /* A CALL_EXPR is assumed to be varying.  This may be overly conservative,
     in the presence of const and pure calls.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return VARYING;

  get_stmt_operands (stmt);

  uses = USE_OPS (ann);
  for (i = 0; i < NUM_USES (uses); i++)
    {
      tree use = USE_OP (uses, i);
      value *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        return UNDEFINED;

      if (val->lattice_val == CONSTANT)
        found_constant = 1;
    }

  vuses = VUSE_OPS (ann);

  if (NUM_VUSES (vuses))
    {
      tree vuse = VUSE_OP (vuses, 0);
      value *val = get_value (vuse);

      if (val->lattice_val == UNKNOWN_VAL)
        return UNKNOWN_VAL;

#ifdef ENABLE_CHECKING
      /* There should be no VUSE operands that are UNDEFINED.  */
      if (val->lattice_val == UNDEFINED)
        abort ();
#endif

      if (val->lattice_val == CONSTANT)
        found_constant = 1;
    }

  return ((found_constant || (!uses && !vuses)) ? CONSTANT : VARYING);
}
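
/* Illustration (not part of the pass, hypothetical names): for
   y_2 = x_1 * 2, if x_1 is CONSTANT the statement is likely CONSTANT
   and ccp_fold is invoked; if x_1 is still UNDEFINED the result is
   assumed UNDEFINED and no folding work is done; once x_1 is VARYING
   the RHS is taken as-is.  */
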
/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
{
  tree min_idx, idx, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the elements type) from that
     ARRAY_REF.  We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
          || TREE_CODE (elt_offset) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!lang_hooks.types_compatible_p (orig_type, elt_type))
    return NULL_TREE;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
        elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = integer_zero_node;
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;

      if (TREE_CODE (elt_size) != INTEGER_CST
          || div_and_round_double (TRUNC_DIV_EXPR, 1,
                                   TREE_INT_CST_LOW (offset),
                                   TREE_INT_CST_HIGH (offset),
                                   TREE_INT_CST_LOW (elt_size),
                                   TREE_INT_CST_HIGH (elt_size),
                                   &lquo, &hquo, &lrem, &hrem)
          || lrem || hrem)
        return NULL_TREE;

      idx = build_int_cst (NULL_TREE, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = integer_zero_node;
  if (TYPE_DOMAIN (array_type))
    {
      if (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
        min_idx = TYPE_MIN_VALUE (TYPE_DOMAIN (array_type));
      else
        min_idx = fold_convert (TYPE_DOMAIN (array_type), min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
        return NULL_TREE;

      idx = fold_convert (TYPE_DOMAIN (array_type), idx);
      elt_offset = fold_convert (TYPE_DOMAIN (array_type), elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  return build (ARRAY_REF, orig_type, base, idx, min_idx,
                size_int (tree_low_cst (elt_size, 1)
                          / (TYPE_ALIGN (elt_type) / BITS_PER_UNIT)));
}
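
/* Worked example (illustrative only): with int a[8], the dereference
   *(int *)((char *)&a + 8) reaches this function with OFFSET == 8 and
   elt_size == 4; the division is exact, so idx becomes 2 and the result
   is the ARRAY_REF a[2].  An offset of 6 would leave a remainder and
   the function would return NULL_TREE.  */
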
/* A subroutine of fold_stmt_r.  Attempts to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
/* ??? This doesn't handle class inheritance.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
                                    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (lang_hooks.types_compatible_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
        continue;
      if (DECL_BIT_FIELD (f))
        continue;
      if (TREE_CODE (DECL_FIELD_OFFSET (f)) != INTEGER_CST)
        continue;

      /* ??? Java creates "interesting" fields for representing base classes.
         They have no name, and have no context.  With no context, we get into
         trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
        continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (DECL_FIELD_OFFSET (f), offset);
      if (cmp > 0)
        continue;

      field_type = TREE_TYPE (f);
      if (cmp < 0)
        {
          /* Don't care about offsets into the middle of scalars.  */
          if (!AGGREGATE_TYPE_P (field_type))
            continue;

          /* Check for array at the end of the struct.  This is often
             used for flexible array members.  We should be able to
             turn this into an array access anyway.  */
          if (TREE_CODE (field_type) == ARRAY_TYPE)
            tail_array_field = f;

          /* Check the end of the field against the offset.  */
          if (!DECL_SIZE_UNIT (f)
              || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
            continue;
          t = int_const_binop (MINUS_EXPR, offset, DECL_FIELD_OFFSET (f), 1);
          if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
            continue;

          /* If we matched, then set offset to the displacement into
             this field.  */
          offset = t;
          goto found;
        }

      /* Here we exactly match the offset being checked.  If the types match,
         then we can return that field.  */
      else if (lang_hooks.types_compatible_p (orig_type, field_type))
        {
          if (base_is_ptr)
            base = build1 (INDIRECT_REF, record_type, base);
          t = build (COMPONENT_REF, field_type, base, f, NULL_TREE);
          return t;
        }

      /* Don't care about type-punning of scalars.  */
      else if (!AGGREGATE_TYPE_P (field_type))
        return NULL_TREE;

      /* An aggregate field at this exact offset: handle it below.  */
      goto found;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);

 found:
  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
                                             orig_type, false);
}
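
/* Worked example (illustrative only): for struct S { int a; int b; },
   the dereference *(int *)((char *)&s + 4) matches field b exactly
   (cmp == 0 with compatible types), so the function returns the
   COMPONENT_REF s.b.  An offset landing in the middle of a scalar
   field returns NULL_TREE instead.  */
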
/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a PLUS_EXPR.  */
  if (TREE_CODE (base) == PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
          && is_gimple_min_invariant (DECL_INITIAL (base)))
        return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B[X].  */
      t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr));
      if (t)
        return t;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset,
                                              TREE_TYPE (expr), false);
      if (t)
        return t;

      /* Fold *&B to B.  We can only do this if EXPR is the same type
         as BASE.  We can't do this if EXPR is the element type of an array
         and BASE is the array.  */
      if (integer_zerop (offset)
          && lang_hooks.types_compatible_p (TREE_TYPE (base),
                                            TREE_TYPE (expr)))
        return base;
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
         such as "_"[3].  Bail out of the entire substitution search
         and arrange for the entire statement to be replaced by a
         call to __builtin_trap.  In all likelihood this will all be
         constant-folded away, but in the meantime we can't leave with
         something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
        {
          /* FIXME: Except that this causes problems elsewhere with dead
             code not being deleted, and we abort in the rtl expanders
             because we failed to remove some ssa_name.  In the meantime,
             just return zero.  */
          /* FIXME2: This condition should be signaled by
             fold_read_from_constant_string directly, rather than
             re-checking for it here.  */
          return integer_zero_node;
        }

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
        {
          t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)),
                                                  base, offset,
                                                  TREE_TYPE (expr), true);
          if (t)
            return t;
        }
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}

/* A subroutine of fold_stmt_r.  EXPR is a PLUS_EXPR.

   A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
        (T *)(&array + const)
   where the cast doesn't actually exist, but is implicit in the
   type of the PLUS_EXPR.  We'd like to turn this into
        &array[x]
   which may be able to propagate further.  */

static tree
maybe_fold_stmt_addition (tree expr)
{
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  tree ptr_type = TREE_TYPE (expr);
  tree ptd_type;
  tree t;
  bool subtract = (TREE_CODE (expr) == MINUS_EXPR);

  /* We're only interested in pointer arithmetic.  */
  if (!POINTER_TYPE_P (ptr_type))
    return NULL_TREE;
  /* Canonicalize the integral operand to op1.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
    {
      if (subtract)
        return NULL_TREE;
      t = op0, op0 = op1, op1 = t;
    }
  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
        break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
        break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
        {
          min_idx = TYPE_MIN_VALUE (min_idx);
          if (min_idx)
            {
              if (TREE_CODE (min_idx) != INTEGER_CST)
                break;

              array_idx = convert (TREE_TYPE (min_idx), array_idx);
              if (!integer_zerop (min_idx))
                array_idx = int_const_binop (MINUS_EXPR, array_idx,
                                             min_idx, 0);
            }
        }

      /* Convert the index to a byte offset.  */
      array_idx = convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      /* If we're manipulating unsigned types, then folding into negative
         values can produce incorrect results.  Particularly if the type
         is smaller than the width of the pointer.  */
      if (subtract
          && TYPE_UNSIGNED (TREE_TYPE (op1))
          && tree_int_cst_lt (array_idx, op1))
        break;
      op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
                             array_idx, op1, 0);
      subtract = false;
      op0 = array_obj;
    }

  /* If we weren't able to fold the subtraction into another array reference,
     canonicalize the integer for passing to the array and component ref
     simplification functions.  */
  if (subtract)
    {
      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
        return NULL_TREE;
      op1 = fold (build1 (NEGATE_EXPR, TREE_TYPE (op1), op1));
      /* ??? In theory fold should always produce another integer.  */
      if (TREE_CODE (op1) != INTEGER_CST)
        return NULL_TREE;
    }

  ptd_type = TREE_TYPE (ptr_type);

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
                                            ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, ptr_type, t);

  return t;
}
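
/* Worked example (illustrative only): for int a[10], the GIMPLE
   expression (int *)(&a + 4) carries an implicit cast from int(*)[10]
   to int *; the constant 4 is a byte displacement of one int element,
   so the function rewrites the sum as &a[1], which later substitutions
   can fold further.  */
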
/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  bool *changed_p = data;
  tree expr = *expr_p, t;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
                                    integer_zero_node);
      break;

      /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF.
         We'd only want to bother decomposing an existing ARRAY_REF if
         the base array is found to have another offset contained within.
         Otherwise we'd be wasting time.  */

    case ADDR_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Set TREE_INVARIANT properly so that the value is properly
         considered constant, and so gets propagated as expected.  */
      if (*changed_p)
        recompute_tree_invarant_for_addr_expr (expr);
      return NULL_TREE;

    case PLUS_EXPR:
    case MINUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (expr);
      break;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
         We've already checked that the records are compatible, so we should
         come up with a set of compatible fields.  */
      {
        tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
        tree expr_field = TREE_OPERAND (expr, 1);

        if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
          {
            expr_field = find_compatible_field (expr_record, expr_field);
            TREE_OPERAND (expr, 1) = expr_field;
          }
      }
      t = NULL_TREE;
      break;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}

/* Fold the statement pointed to by STMT_P.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (tree *stmt_p)
{
  tree rhs, result, stmt;
  bool changed = false;

  stmt = *stmt_p;

  /* If we replaced constants and the statement makes pointer dereferences,
     then we may need to fold instances of *&VAR into VAR, etc.  */
  if (walk_tree (stmt_p, fold_stmt_r, &changed, NULL))
    {
      *stmt_p
        = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
                                    NULL);
      return true;
    }

  rhs = get_rhs (stmt);
  if (!rhs)
    return changed;
  result = NULL_TREE;

  if (TREE_CODE (rhs) == CALL_EXPR)
    {
      tree callee;

      /* Check for builtins that CCP can handle using information not
         available in the generic fold routines.  */
      callee = get_callee_fndecl (rhs);
      if (callee && DECL_BUILT_IN (callee))
        result = ccp_fold_builtin (stmt, rhs);
      else
        {
          /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
             here are when we've propagated the address of a decl into the
             object slot.  */
          /* ??? Should perhaps do this in fold proper.  However, doing it
             there requires that we create a new CALL_EXPR, and that requires
             copying EH region info to the new node.  Easier to just do it
             here where we can just smash the call operand.  */
          callee = TREE_OPERAND (rhs, 0);
          if (TREE_CODE (callee) == OBJ_TYPE_REF
              && lang_hooks.fold_obj_type_ref
              && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
              && DECL_P (TREE_OPERAND (OBJ_TYPE_REF_OBJECT (callee), 0)))
            {
              tree t;

              /* ??? Caution: Broken ADDR_EXPR semantics means that
                 looking at the type of the operand of the addr_expr
                 can yield an array type.  See silly exception in
                 check_pointer_types_r.  */

              t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
              t = lang_hooks.fold_obj_type_ref (callee, t);
              if (t)
                {
                  TREE_OPERAND (rhs, 0) = t;
                  changed = true;
                }
            }
        }
    }

  /* If we couldn't fold the RHS, hand over to the generic fold routines.  */
  if (result == NULL_TREE)
    result = fold (rhs);

  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
     may have been added by fold, and "useless" type conversions that might
     now be apparent due to propagation.  */
  STRIP_USELESS_TYPE_CONVERSION (result);

  if (result != rhs)
    changed |= set_rhs (stmt_p, result);

  return changed;
}
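/* Illustration only; hypothetical effects of fold_stmt on GIMPLE
   statements after propagation:

       x_1 = strlen ("abc");   ===>   x_1 = 3;   (via ccp_fold_builtin)
       y_2 = 3 + 4;            ===>   y_2 = 7;   (via the generic fold)

   If the walk instead signals that folding uncovered an invalid
   dereference, the whole statement is replaced by a call to
   __builtin_trap, built above from
   implicit_built_in_decls[BUILT_IN_TRAP].  */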
/* Get the main expression from statement STMT.  */

tree
get_rhs (tree stmt)
{
  enum tree_code code = TREE_CODE (stmt);

  switch (code)
    {
    case RETURN_EXPR:
      stmt = TREE_OPERAND (stmt, 0);
      if (!stmt || TREE_CODE (stmt) != MODIFY_EXPR)
        return stmt;
      /* FALLTHRU */

    case MODIFY_EXPR:
      stmt = TREE_OPERAND (stmt, 1);
      if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
        return TREE_OPERAND (stmt, 0);
      else
        return stmt;

    case COND_EXPR:
      return COND_EXPR_COND (stmt);

    case SWITCH_EXPR:
      return SWITCH_COND (stmt);

    case GOTO_EXPR:
      return GOTO_DESTINATION (stmt);

    case LABEL_EXPR:
      return LABEL_EXPR_LABEL (stmt);

    default:
      return stmt;
    }
}
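/* Illustration only.  For the statement forms handled above, get_rhs
   returns the following "main" expressions:

       a_1 = b_2 + 1;       ===>   b_2 + 1
       return a_1 = b_2;    ===>   b_2       (through the MODIFY_EXPR)
       if (a_1 > 10) ...    ===>   a_1 > 10
       switch (a_1) ...     ===>   a_1
       goto dest;           ===>   dest
   */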
/* Set the main expression of *STMT_P to EXPR.  */

bool
set_rhs (tree *stmt_p, tree expr)
{
  tree stmt = *stmt_p, op;
  enum tree_code code = TREE_CODE (expr);
  stmt_ann_t ann;

  /* Verify the constant folded result is valid gimple.  */
  if (TREE_CODE_CLASS (code) == '2')
    {
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
          || !is_gimple_val (TREE_OPERAND (expr, 1)))
        return false;
    }
  else if (TREE_CODE_CLASS (code) == '1')
    {
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
        return false;
    }

  switch (TREE_CODE (stmt))
    {
    case RETURN_EXPR:
      op = TREE_OPERAND (stmt, 0);
      if (TREE_CODE (op) != MODIFY_EXPR)
        {
          TREE_OPERAND (stmt, 0) = expr;
          break;
        }
      stmt = op;
      /* FALLTHRU */

    case MODIFY_EXPR:
      op = TREE_OPERAND (stmt, 1);
      if (TREE_CODE (op) == WITH_SIZE_EXPR)
        stmt = op;
      TREE_OPERAND (stmt, 1) = expr;
      break;

    case COND_EXPR:
      COND_EXPR_COND (stmt) = expr;
      break;

    case SWITCH_EXPR:
      SWITCH_COND (stmt) = expr;
      break;

    case GOTO_EXPR:
      GOTO_DESTINATION (stmt) = expr;
      break;

    case LABEL_EXPR:
      LABEL_EXPR_LABEL (stmt) = expr;
      break;

    default:
      /* Replace the whole statement with EXPR.  If EXPR has no side
         effects, then replace *STMT_P with an empty statement.  */
      ann = stmt_ann (stmt);
      *stmt_p = TREE_SIDE_EFFECTS (expr) ? expr : build_empty_stmt ();
      (*stmt_p)->common.ann = (tree_ann_t) ann;

      if (TREE_SIDE_EFFECTS (expr))
        {
          def_optype defs;
          v_may_def_optype v_may_defs;
          v_must_def_optype v_must_defs;
          size_t i;

          /* Fix all the SSA_NAMEs created by *STMT_P to point to its new
             replacement.  */
          defs = DEF_OPS (ann);
          for (i = 0; i < NUM_DEFS (defs); i++)
            {
              tree var = DEF_OP (defs, i);
              if (TREE_CODE (var) == SSA_NAME)
                SSA_NAME_DEF_STMT (var) = *stmt_p;
            }

          v_may_defs = V_MAY_DEF_OPS (ann);
          for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
            {
              tree var = V_MAY_DEF_RESULT (v_may_defs, i);
              if (TREE_CODE (var) == SSA_NAME)
                SSA_NAME_DEF_STMT (var) = *stmt_p;
            }

          v_must_defs = V_MUST_DEF_OPS (ann);
          for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
            {
              tree var = V_MUST_DEF_OP (v_must_defs, i);
              if (TREE_CODE (var) == SSA_NAME)
                SSA_NAME_DEF_STMT (var) = *stmt_p;
            }
        }
      break;
    }

  return true;
}
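/* Illustration only (hypothetical caller).  Replacing the condition of
   a COND_EXPR with a folded constant:

       tree stmt = ...;                  (the statement "if (x_1 > 10)")
       if (set_rhs (&stmt, boolean_true_node))
         modify_stmt (stmt);             (operands need rescanning)

   yields "if (1)", which a later CFG cleanup can exploit.  Note that
   set_rhs refuses, returning false, when EXPR is not valid GIMPLE,
   e.g. a binary expression whose operands are not GIMPLE values.  */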
/* Return a default value for variable VAR using the following rules:

   1- Function arguments are considered VARYING.

   2- Global and static variables that are declared constant are
      considered CONSTANT.

   3- Any other virtually defined variable is considered UNKNOWN_VAL.

   4- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.  */

static value
get_default_value (tree var)
{
  value val;
  tree sym;

  if (TREE_CODE (var) == SSA_NAME)
    sym = SSA_NAME_VAR (var);
  else
    {
#ifdef ENABLE_CHECKING
      if (!DECL_P (var))
        abort ();
#endif
      sym = var;
    }

  val.lattice_val = UNDEFINED;
  val.const_val = NULL_TREE;

  if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
    {
      /* Function arguments and volatile variables are considered VARYING.  */
      val.lattice_val = VARYING;
    }
  else if (TREE_STATIC (sym))
    {
      /* Globals and static variables are considered UNKNOWN_VAL,
         unless they are declared 'const'.  */
      if (TREE_READONLY (sym)
          && DECL_INITIAL (sym)
          && is_gimple_min_invariant (DECL_INITIAL (sym)))
        {
          val.lattice_val = CONSTANT;
          val.const_val = DECL_INITIAL (sym);
        }
      else
        {
          val.const_val = NULL_TREE;
          val.lattice_val = UNKNOWN_VAL;
        }
    }
  else if (!is_gimple_reg (sym))
    {
      val.const_val = NULL_TREE;
      val.lattice_val = UNKNOWN_VAL;
    }
  else
    {
      enum tree_code code;
      tree stmt = SSA_NAME_DEF_STMT (var);

      if (!IS_EMPTY_STMT (stmt))
        {
          code = TREE_CODE (stmt);
          if (code != MODIFY_EXPR && code != PHI_NODE)
            val.lattice_val = VARYING;
        }
    }

  return val;
}
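/* Illustration only.  Applying the rules above to a few declarations:

       int f (int n)              n gets VARYING     (PARM_DECL, rule 1)
       static const int k = 42;   k gets CONSTANT 42 (readonly + initializer)
       static int g;              g gets UNKNOWN_VAL (writable static)
       int x;  (local register)   x starts UNDEFINED until a defining
                                  statement is simulated (rule 4)
   */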
/* Fold builtin call FN in statement STMT.  If it cannot be folded into a
   constant, return NULL_TREE.  Otherwise, return its constant value.  */

static tree
ccp_fold_builtin (tree stmt, tree fn)
{
  tree result, strlen_val[2];
  tree callee, arglist, a;
  int strlen_arg, i;
  bitmap visited;
  bool ignore;

  ignore = TREE_CODE (stmt) != MODIFY_EXPR;

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_builtin (fn, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = get_callee_fndecl (fn);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  arglist = TREE_OPERAND (fn, 1);
  if (!arglist)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      strlen_arg = 1;
      break;

    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      strlen_arg = 2;
      break;

    default:
      return NULL_TREE;
    }

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_XMALLOC ();

  memset (strlen_val, 0, sizeof (strlen_val));
  for (i = 0, a = arglist;
       strlen_arg;
       i++, strlen_arg >>= 1, a = TREE_CHAIN (a))
    if (strlen_arg & 1)
      {
        bitmap_clear (visited);
        if (!get_strlen (TREE_VALUE (a), &strlen_val[i], visited))
          strlen_val[i] = NULL_TREE;
      }

  BITMAP_XFREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (strlen_val[0])
        {
          tree new = fold_convert (TREE_TYPE (fn), strlen_val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new)
              || (is_gimple_cast (new)
                  && is_gimple_val (TREE_OPERAND (new, 0))))
            return new;
        }
      break;

    case BUILT_IN_STRCPY:
      if (strlen_val[1] && is_gimple_val (strlen_val[1]))
        result = fold_builtin_strcpy (fn, strlen_val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (strlen_val[1] && is_gimple_val (strlen_val[1]))
        result = fold_builtin_strncpy (fn, strlen_val[1]);
      break;

    case BUILT_IN_FPUTS:
      result = fold_builtin_fputs (arglist,
                                   TREE_CODE (stmt) != MODIFY_EXPR, 0,
                                   strlen_val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      result = fold_builtin_fputs (arglist,
                                   TREE_CODE (stmt) != MODIFY_EXPR, 1,
                                   strlen_val[0]);
      break;

    default:
      abort ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);

  return result;
}
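/* Illustration only.  Using the string length recovered through the
   SSA web lets this folder simplify calls the generic folder cannot
   see through:

       s_1 = "hello";
       n_2 = strlen (s_1);     ===>   n_2 = 5;

   Likewise, once strlen_val[1] is a known constant, fold_builtin_strcpy
   may be able to rewrite the strcpy into a cheaper equivalent.  */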
/* Return the string length of ARG in LENGTH.  If ARG is an SSA name variable,
   follow its use-def chains.  If LENGTH is not NULL and its value is not
   equal to the length we determine, or if we are unable to determine the
   length, return false.  VISITED is a bitmap of visited variables.  */

static bool
get_strlen (tree arg, tree *length, bitmap visited)
{
  tree var, def_stmt, val;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length && simple_cst_equal (val, *length) != 1)
        return false;

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (TREE_CODE (def_stmt))
    {
      case MODIFY_EXPR:
        {
          tree len, rhs;

          /* The RHS of the statement defining VAR must either have a
             constant length or come from another SSA_NAME with a constant
             length.  */
          rhs = TREE_OPERAND (def_stmt, 1);
          if (TREE_CODE (rhs) == SSA_NAME)
            return get_strlen (rhs, length, visited);

          /* See if the RHS is a constant length.  */
          len = c_strlen (rhs, 1);
          if (len)
            {
              if (*length && simple_cst_equal (len, *length) != 1)
                return false;

              *length = len;
              return true;
            }

          break;
        }

      case PHI_NODE:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
          int i;

          for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
            {
              tree arg = PHI_ARG_DEF (def_stmt, i);

              /* If this PHI has itself as an argument, we cannot
                 determine the string length of this argument.  However,
                 if we can find a constant string length for the other
                 PHI args then we can still be sure that this is a
                 constant string length.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == PHI_RESULT (def_stmt))
                continue;

              if (!get_strlen (arg, length, visited))
                return false;
            }

          return true;
        }

      default:
        break;
    }

  return false;
}
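/* Illustration only.  get_strlen succeeds on the PHI below because
   both incoming strings have the same constant length 3:

       if (cond_1)  s_1 = "abc";
       else         s_2 = "xyz";
       s_3 = PHI <s_1, s_2>;        length of s_3 is 3

   Had the lengths differed, the simple_cst_equal check above would
   fail and the caller would discard the length information.  */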
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static void
execute_fold_all_builtins (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree *stmtp = bsi_stmt_ptr (i);
          tree call = get_rhs (*stmtp);
          tree callee, result;

          if (!call || TREE_CODE (call) != CALL_EXPR)
            continue;
          callee = get_callee_fndecl (call);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          result = ccp_fold_builtin (*stmtp, call);
          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              default:
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_generic_stmt (dump_file, *stmtp, dump_flags);
            }

          if (set_rhs (stmtp, result))
            modify_stmt (*stmtp);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_generic_stmt (dump_file, *stmtp, dump_flags);
              fprintf (dump_file, "\n");
            }
        }
    }
}
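/* Illustration only.  A __builtin_constant_p call still present in the
   IL at this late point is resolved pessimistically to zero:

       x_1 = __builtin_constant_p (n_2);   ===>   x_1 = 0;

   whereas a call whose argument did fold to a constant would already
   have been replaced with integer_one_node by earlier folding.  */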
struct tree_opt_pass pass_fold_builtins =
{
  "fab",                                /* name */
  NULL,                                 /* gate */
  execute_fold_all_builtins,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa      /* todo_flags_finish */
};

#include "gt-tree-ssa-ccp.h"