/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* Conditional constant propagation.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "errors.h"
#include "ggc.h"
#include "tree.h"
#include "langhooks.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"

#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "expr.h"
#include "flags.h"

/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED = 0,
  UNDEFINED,
  CONSTANT,
  VARYING
} latticevalue;

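/* The lattice is ordered UNDEFINED > CONSTANT > VARYING.  During
   simulation a value only ever moves downward (see the checks in
   set_lattice_value), which is what guarantees termination.  For
   example, an SSA name may start out UNDEFINED, be found to be
   CONSTANT 5 on the first executable path that reaches it, and drop
   to VARYING if a conflicting CONSTANT 7 later arrives from another
   path.  */
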
/* Use the TREE_VISITED bitflag to mark statements and PHI nodes that have
   been deemed VARYING and shouldn't be simulated again.  */
#define DONT_SIMULATE_AGAIN(T) TREE_VISITED (T)

/* Main structure for CCP.  Contains the lattice value and, if it's a
   constant, the constant value.  */
typedef struct
{
  latticevalue lattice_val;
  tree const_val;
} value;

/* A bitmap to keep track of executable blocks in the CFG.  */
static sbitmap executable_blocks;

/* Array of control flow edges on the worklist.  */
static GTY(()) varray_type cfg_blocks = NULL;

static unsigned int cfg_blocks_num = 0;
static int cfg_blocks_tail;
static int cfg_blocks_head;

static sbitmap bb_in_list;

/* This is used to track the current value of each variable.  */
static value *value_vector;

/* Worklist of SSA edges which will need reexamination as their definition
   has changed.  SSA edges are def-use edges in the SSA web.  For each
   edge, we store the definition statement or PHI node D.  The destination
   nodes that need to be visited are accessed using immediate_uses (D).  */
static GTY(()) varray_type ssa_edges;

static void initialize (void);
static void finalize (void);
static void visit_phi_node (tree);
static tree ccp_fold (tree);
static value cp_lattice_meet (value, value);
static void visit_stmt (tree);
static void visit_cond_stmt (tree);
static void visit_assignment (tree);
static void add_var_to_ssa_edges_worklist (tree);
static void add_outgoing_control_edges (basic_block);
static void add_control_edge (edge);
static void def_to_varying (tree);
static void set_lattice_value (tree, value);
static void simulate_block (basic_block);
static void simulate_stmt (tree);
static void substitute_and_fold (void);
static value evaluate_stmt (tree);
static void dump_lattice_value (FILE *, const char *, value);
static bool replace_uses_in (tree, bool *);
static latticevalue likely_value (tree);
static tree get_rhs (tree);
static void set_rhs (tree *, tree);
static value *get_value (tree);
static value get_default_value (tree);
static tree ccp_fold_builtin (tree, tree);
static bool get_strlen (tree, tree *, bitmap);
static inline bool cfg_blocks_empty_p (void);
static void cfg_blocks_add (basic_block);
static basic_block cfg_blocks_get (void);
static bool need_imm_uses_for (tree var);

/* Main entry point for SSA Conditional Constant Propagation.

   On exit, VARS_TO_RENAME will contain the symbols that have been exposed by
   the propagation of ADDR_EXPR expressions into pointer dereferences and need
   to be renamed into SSA.  */

static void
tree_ssa_ccp (void)
{
  initialize ();

  /* Iterate until the worklists are empty.  */
  while (!cfg_blocks_empty_p () || VARRAY_ACTIVE_SIZE (ssa_edges) > 0)
    {
      if (!cfg_blocks_empty_p ())
	{
	  /* Pull the next block to simulate off the worklist.  */
	  basic_block dest_block = cfg_blocks_get ();
	  simulate_block (dest_block);
	}

      /* The SSA_EDGES worklist can get rather large.  Go ahead and
	 drain the entire worklist each iteration through this loop.  */
      while (VARRAY_ACTIVE_SIZE (ssa_edges) > 0)
	{
	  /* Pull the statement to simulate off the worklist.  */
	  tree stmt = VARRAY_TOP_TREE (ssa_edges);
	  stmt_ann_t ann = stmt_ann (stmt);
	  VARRAY_POP (ssa_edges);

	  /* visit_stmt can "cancel" reevaluation of some statements.
	     If it does, then in_ccp_worklist will be zero.  */
	  if (ann->in_ccp_worklist)
	    {
	      ann->in_ccp_worklist = 0;
	      simulate_stmt (stmt);
	    }
	}
    }

  /* Now perform substitutions based on the known constant values.  */
  substitute_and_fold ();

  /* Now cleanup any unreachable code.  */
  cleanup_tree_cfg ();

  /* Free allocated memory.  */
  finalize ();

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_referenced_vars (dump_file);
      fprintf (dump_file, "\n\n");
    }
}

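/* Note on the driver loop above: CFG_BLOCKS and SSA_EDGES together
   implement the sparse conditional propagation scheme of Wegman and
   Zadeck (see the references at the top of the file).  Draining
   SSA_EDGES completely before pulling another block keeps lattice
   values as current as possible before new control flow is explored;
   the order does not affect the final fixpoint, since lattice values
   only ever move downward.  */
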
static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}

struct tree_opt_pass pass_ccp =
{
  "ccp",				/* name */
  gate_ccp,				/* gate */
  tree_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_rename_vars
    | TODO_ggc_collect | TODO_verify_ssa	/* todo_flags_finish */
};

/* Get the constant value associated with variable VAR.  */

static value *
get_value (tree var)
{
  value *val;

#if defined ENABLE_CHECKING
  if (TREE_CODE (var) != SSA_NAME)
    abort ();
#endif

  val = &value_vector[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}

/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

static void
simulate_block (basic_block block)
{
  tree phi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  */
  for (phi = phi_nodes (block); phi; phi = TREE_CHAIN (phi))
    visit_phi_node (phi);

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (!TEST_BIT (executable_blocks, block->index))
    {
      block_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;

      /* Note that we have simulated this block.  */
      SET_BIT (executable_blocks, block->index);

      for (j = bsi_start (block); !bsi_end_p (j); bsi_next (&j))
	visit_stmt (bsi_stmt (j));

      /* We can not predict when abnormal edges will be executed, so
	 once a block is considered executable, we consider any
	 outgoing abnormal edges as executable.

	 At the same time, if this block has only one successor that is
	 reached by non-abnormal edges, then add that successor to the
	 worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      for (e = block->succ; e; e = e->succ_next)
	{
	  if (e->flags & EDGE_ABNORMAL)
	    {
	      add_control_edge (e);
	    }
	  else
	    {
	      normal_edge_count++;
	      normal_edge = e;
	    }
	}

      if (normal_edge_count == 1)
	add_control_edge (normal_edge);
    }
}

/* Follow the def-use edges for statement DEF_STMT and simulate all the
   statements reached by it.  */

static void
simulate_stmt (tree use_stmt)
{
  basic_block use_bb = bb_for_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
      print_generic_stmt (dump_file, use_stmt, dump_flags);
    }

  if (TREE_CODE (use_stmt) == PHI_NODE)
    {
      /* PHI nodes are always visited, regardless of whether or not the
	 destination block is executable.  */
      visit_phi_node (use_stmt);
    }
  else if (TEST_BIT (executable_blocks, use_bb->index))
    {
      /* Otherwise, visit the statement containing the use reached by
	 DEF, only if the destination block is marked executable.  */
      visit_stmt (use_stmt);
    }
}

/* Perform final substitution and folding.  After this pass the program
   should still be in SSA form.  */

static void
substitute_and_fold (void)
{
  basic_block bb;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file,
	     "\nSubstituting constants and folding statements\n\n");

  /* Substitute constants in every statement of every basic block.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree phi;

      /* Propagate our known constants into PHI nodes.  */
      for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
	{
	  int i;

	  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
	    {
	      value *new_val;
	      tree *orig_p = &PHI_ARG_DEF (phi, i);

	      if (! SSA_VAR_P (*orig_p))
		break;

	      new_val = get_value (*orig_p);
	      if (new_val->lattice_val == CONSTANT
		  && may_propagate_copy (*orig_p, new_val->const_val))
		*orig_p = new_val->const_val;
	    }
	}

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  bool replaced_address;
	  tree stmt = bsi_stmt (i);

	  /* Skip statements that have been folded already.  */
	  if (stmt_modified_p (stmt) || !is_exec_stmt (stmt))
	    continue;

	  /* Replace the statement with its folded version and mark it
	     folded.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Line %d: replaced ", get_lineno (stmt));
	      print_generic_stmt (dump_file, stmt, TDF_SLIM);
	    }

	  if (replace_uses_in (stmt, &replaced_address))
	    {
	      bool changed = fold_stmt (bsi_stmt_ptr (i));
	      stmt = bsi_stmt (i);
	      modify_stmt (stmt);
	      /* If we folded a builtin function, we'll likely
		 need to rename VDEFs.  */
	      if (replaced_address || changed)
		mark_new_vars_to_rename (stmt, vars_to_rename);
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " with ");
	      print_generic_stmt (dump_file, stmt, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }
	}
    }
}

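/* For example, given the propagated value x_1 = CONSTANT 3, the
   statement 'y_2 = x_1 + 1' is rewritten by replace_uses_in to
   'y_2 = 3 + 1' and then folded to 'y_2 = 4' by fold_stmt.  A
   predicate such as 'if (x_1 > 0)' folds to 'if (1)', whose dead arm
   is removed afterwards by cleanup_tree_cfg.  */
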
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling cp_lattice_meet() with all the
   arguments of the PHI node that are incoming via executable edges.  */

static void
visit_phi_node (tree phi)
{
  bool short_circuit = 0;
  value phi_val, *curr_val;
  int i;

  /* If the PHI node has already been deemed to be VARYING, don't simulate
     it again.  */
  if (DONT_SIMULATE_AGAIN (phi))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_generic_expr (dump_file, phi, dump_flags);
    }

  curr_val = get_value (PHI_RESULT (phi));
  switch (curr_val->lattice_val)
    {
    case VARYING:
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "\n   Shortcircuit. Default of VARYING.");
      short_circuit = 1;
      break;

    case CONSTANT:
      phi_val = *curr_val;
      break;

    case UNDEFINED:
    case UNINITIALIZED:
      phi_val.lattice_val = UNDEFINED;
      phi_val.const_val = NULL_TREE;
      break;

    default:
      abort ();
    }

  /* If the variable is volatile or the variable is never referenced in a
     real operand, then consider the PHI node VARYING.  */
  if (short_circuit || TREE_THIS_VOLATILE (SSA_NAME_VAR (PHI_RESULT (phi))))
    {
      phi_val.lattice_val = VARYING;
      phi_val.const_val = NULL;
    }
  else
    for (i = 0; i < PHI_NUM_ARGS (phi); i++)
      {
	/* Compute the meet operator over all the PHI arguments.  */
	edge e = PHI_ARG_EDGE (phi, i);

	if (dump_file && (dump_flags & TDF_DETAILS))
	  {
	    fprintf (dump_file,
		     "\n    Argument #%d (%d -> %d %sexecutable)\n",
		     i, e->src->index, e->dest->index,
		     (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	  }

	/* If the incoming edge is executable, compute the meet operator for
	   the existing value of the PHI node and the current PHI argument.  */
	if (e->flags & EDGE_EXECUTABLE)
	  {
	    tree rdef = PHI_ARG_DEF (phi, i);
	    value *rdef_val, val;

	    if (is_gimple_min_invariant (rdef))
	      {
		val.lattice_val = CONSTANT;
		val.const_val = rdef;
		rdef_val = &val;
	      }
	    else
	      rdef_val = get_value (rdef);

	    phi_val = cp_lattice_meet (phi_val, *rdef_val);

	    if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "\t");
		print_generic_expr (dump_file, rdef, dump_flags);
		dump_lattice_value (dump_file, "\tValue: ", *rdef_val);
		fprintf (dump_file, "\n");
	      }

	    if (phi_val.lattice_val == VARYING)
	      break;
	  }
      }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", phi_val);
      fprintf (dump_file, "\n\n");
    }

  set_lattice_value (PHI_RESULT (phi), phi_val);
  if (phi_val.lattice_val == VARYING)
    DONT_SIMULATE_AGAIN (phi) = 1;
}

/* Compute the meet operator between VAL1 and VAL2:

	any M UNDEFINED = any
	any M VARYING   = VARYING
	Ci  M Cj        = Ci       if (i == j)
	Ci  M Cj        = VARYING  if (i != j)  */

static value
cp_lattice_meet (value val1, value val2)
{
  value result;

  /* any M UNDEFINED = any.  */
  if (val1.lattice_val == UNDEFINED)
    return val2;
  else if (val2.lattice_val == UNDEFINED)
    return val1;

  /* any M VARYING = VARYING.  */
  if (val1.lattice_val == VARYING || val2.lattice_val == VARYING)
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
      return result;
    }

  /* Ci M Cj = Ci       if (i == j)
     Ci M Cj = VARYING  if (i != j)  */
  if (simple_cst_equal (val1.const_val, val2.const_val) == 1)
    {
      result.lattice_val = CONSTANT;
      result.const_val = val1.const_val;
    }
  else
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
    }

  return result;
}

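/* For illustration, worked instances of the meet rules above, with
   values written as <lattice, constant>:

     meet (<UNDEFINED, ->, <CONSTANT, 4>)  ==> <CONSTANT, 4>
     meet (<CONSTANT, 4>,  <CONSTANT, 4>)  ==> <CONSTANT, 4>
     meet (<CONSTANT, 4>,  <CONSTANT, 5>)  ==> <VARYING, ->
     meet (<VARYING, ->,   <CONSTANT, 4>)  ==> <VARYING, ->

   This is also why visit_phi_node skips PHI arguments flowing in
   through edges that are not (yet) executable: they behave as
   UNDEFINED and therefore never lower the result.  */
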
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, do the following:

   - If the statement is an assignment, add all the SSA edges starting at
     this definition.

   - If the statement is a conditional branch:
	. If the statement evaluates to non-constant, add all edges to
	  worklist.
	. If the statement is constant, add the edge executed as the
	  result of the branch.  */

static void
visit_stmt (tree stmt)
{
  size_t i;
  stmt_ann_t ann;
  def_optype defs;
  vdef_optype vdefs;

  /* If the statement has already been deemed to be VARYING, don't simulate
     it again.  */
  if (DONT_SIMULATE_AGAIN (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement: ");
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  ann = stmt_ann (stmt);

  /* If this statement is already in the worklist then "cancel" it.  The
     reevaluation implied by the worklist entry will produce the same
     value we generate here and thus reevaluating it again from the
     worklist is pointless.  */
  if (ann->in_ccp_worklist)
    ann->in_ccp_worklist = 0;

  /* Now examine the statement.  If the statement is an assignment that
     produces a single output value, evaluate its RHS to see if the lattice
     value of its output has changed.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR
      && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME)
    visit_assignment (stmt);

  /* Definitions made by statements other than assignments to SSA_NAMEs
     represent unknown modifications to their outputs.  Mark them VARYING.  */
  else if (NUM_DEFS (defs = DEF_OPS (ann)) != 0)
    {
      DONT_SIMULATE_AGAIN (stmt) = 1;
      for (i = 0; i < NUM_DEFS (defs); i++)
	{
	  tree def = DEF_OP (defs, i);
	  def_to_varying (def);
	}
    }

  /* If STMT is a conditional branch, see if we can determine which branch
     will be taken.  */
  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
    visit_cond_stmt (stmt);

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  else
    {
      DONT_SIMULATE_AGAIN (stmt) = 1;

      /* If STMT is a computed goto, then mark all the output edges
	 executable.  */
      if (computed_goto_p (stmt))
	add_outgoing_control_edges (bb_for_stmt (stmt));
    }

  /* Mark all VDEF operands VARYING.  */
  vdefs = VDEF_OPS (ann);
  for (i = 0; i < NUM_VDEFS (vdefs); i++)
    def_to_varying (VDEF_RESULT (vdefs, i));
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS.  */

static void
visit_assignment (tree stmt)
{
  value val;
  tree lhs, rhs;

  lhs = TREE_OPERAND (stmt, 0);
  rhs = TREE_OPERAND (stmt, 1);

  if (TREE_THIS_VOLATILE (SSA_NAME_VAR (lhs)))
    {
      /* Volatile variables are always VARYING.  */
      val.lattice_val = VARYING;
      val.const_val = NULL_TREE;
    }
  else if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* For a simple copy operation, we copy the lattice values.  */
      value *nval = get_value (rhs);
      val = *nval;
    }
  else
    {
      /* Evaluate the statement.  */
      val = evaluate_stmt (stmt);
    }

  /* FIXME: Hack.  If this was a definition of a bitfield, we need to widen
     the constant value into the type of the destination variable.  This
     should not be necessary if GCC represented bitfields properly.  */
  {
    tree lhs = TREE_OPERAND (stmt, 0);
    if (val.lattice_val == CONSTANT
	&& TREE_CODE (lhs) == COMPONENT_REF
	&& DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
      {
	tree w = widen_bitfield (val.const_val, TREE_OPERAND (lhs, 1), lhs);

	if (w && is_gimple_min_invariant (w))
	  val.const_val = w;
	else
	  {
	    val.lattice_val = VARYING;
	    val.const_val = NULL;
	  }
      }
  }

  /* Set the lattice value of the statement's output.  */
  set_lattice_value (lhs, val);
  if (val.lattice_val == VARYING)
    DONT_SIMULATE_AGAIN (stmt) = 1;
}

/* Visit the conditional statement STMT.  If it evaluates to a constant value,
   mark outgoing edges appropriately.  */

static void
visit_cond_stmt (tree stmt)
{
  edge e;
  value val;
  basic_block block;

  block = bb_for_stmt (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically, add
     all outgoing edges from BLOCK.  */
  e = find_taken_edge (block, val.const_val);
  if (e)
    add_control_edge (e);
  else
    {
      DONT_SIMULATE_AGAIN (stmt) = 1;
      add_outgoing_control_edges (block);
    }
}

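/* For instance, if the predicate of a COND_EXPR evaluates to the
   constant zero, find_taken_edge returns the false edge and only
   that edge is added to the worklist.  The true arm is never
   simulated; unless some other executable edge reaches it, its
   statements keep their optimistic lattice values and the block is
   eventually removed as unreachable by cleanup_tree_cfg.  */
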
/* Add all the edges coming out of BB to the control flow worklist.  */

static void
add_outgoing_control_edges (basic_block bb)
{
  edge e;

  for (e = bb->succ; e; e = e->succ_next)
    add_control_edge (e);
}


/* Add edge E to the control flow worklist.  */

static void
add_control_edge (edge e)
{
  basic_block bb = e->dest;
  if (bb == EXIT_BLOCK_PTR)
    return;

  /* If the edge had already been executed, skip it.  */
  if (e->flags & EDGE_EXECUTABLE)
    return;

  e->flags |= EDGE_EXECUTABLE;

  /* If the block is already in the list, we're done.  */
  if (TEST_BIT (bb_in_list, bb->index))
    return;

  cfg_blocks_add (bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Adding Destination of edge (%d -> %d) to worklist\n\n",
	     e->src->index, e->dest->index);
}

/* CCP specific front-end to the non-destructive constant folding routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS.  */

static tree
ccp_fold (tree stmt)
{
  tree rhs = get_rhs (stmt);
  enum tree_code code = TREE_CODE (rhs);
  int kind = TREE_CODE_CLASS (code);
  tree retval = NULL_TREE;

  /* If the RHS is just a variable, then that variable must now have
     a constant value that we can return directly.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    return get_value (rhs)->const_val;

  /* Unary operators.  Note that we know the single operand must
     be a constant.  So this should almost always return a
     simplified RHS.  */
  if (kind == '1')
    {
      /* Handle unary operators which can appear in GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);

      /* Simplify the operand down to a constant.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  value *val = get_value (op0);
	  if (val->lattice_val == CONSTANT)
	    op0 = val->const_val;
	}

      retval = nondestructive_fold_unary_to_constant (code,
						      TREE_TYPE (rhs),
						      op0);

      /* If we folded, but did not create an invariant, then we can not
	 use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
	return NULL;

      /* If we could not fold the expression, but the arguments are all
	 constants and gimple values, then build and return the new
	 expression.

	 In some cases the new expression is still something we can
	 use as a replacement for an argument.  This happens with
	 NOP conversions of types for example.

	 In other cases the new expression can not be used as a
	 replacement for an argument (as it would create non-gimple
	 code).  But the new expression can still be used to derive
	 other constants.  */
      if (! retval && is_gimple_min_invariant (op0))
	return build1 (code, TREE_TYPE (rhs), op0);
    }

  /* Binary and comparison operators.  We know one or both of the
     operands are constants.  */
  else if (kind == '2'
	   || kind == '<'
	   || code == TRUTH_AND_EXPR
	   || code == TRUTH_OR_EXPR
	   || code == TRUTH_XOR_EXPR)
    {
      /* Handle binary and comparison operators that can appear in
	 GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);
      tree op1 = TREE_OPERAND (rhs, 1);

      /* Simplify the operands down to constants when appropriate.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  value *val = get_value (op0);
	  if (val->lattice_val == CONSTANT)
	    op0 = val->const_val;
	}

      if (TREE_CODE (op1) == SSA_NAME)
	{
	  value *val = get_value (op1);
	  if (val->lattice_val == CONSTANT)
	    op1 = val->const_val;
	}

      retval = nondestructive_fold_binary_to_constant (code,
						       TREE_TYPE (rhs),
						       op0, op1);

      /* If we folded, but did not create an invariant, then we can not
	 use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
	return NULL;

      /* If we could not fold the expression, but the arguments are all
	 constants and gimple values, then build and return the new
	 expression.

	 In some cases the new expression is still something we can
	 use as a replacement for an argument.  This happens with
	 NOP conversions of types for example.

	 In other cases the new expression can not be used as a
	 replacement for an argument (as it would create non-gimple
	 code).  But the new expression can still be used to derive
	 other constants.  */
      if (! retval
	  && is_gimple_min_invariant (op0)
	  && is_gimple_min_invariant (op1))
	return build (code, TREE_TYPE (rhs), op0, op1);
    }

  /* We may be able to fold away calls to builtin functions if their
     arguments are constants.  */
  else if (code == CALL_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
	   && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
	       == FUNCTION_DECL)
	   && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
    {
      use_optype uses = STMT_USE_OPS (stmt);
      if (NUM_USES (uses) != 0)
	{
	  tree *orig;
	  size_t i;

	  /* Preserve the original values of every operand.  */
	  orig = xmalloc (sizeof (tree) * NUM_USES (uses));
	  for (i = 0; i < NUM_USES (uses); i++)
	    orig[i] = USE_OP (uses, i);

	  /* Substitute operands with their values and try to fold.  */
	  replace_uses_in (stmt, NULL);
	  retval = fold_builtin (rhs);

	  /* Restore operands to their original form.  */
	  for (i = 0; i < NUM_USES (uses); i++)
	    *(USE_OP_PTR (uses, i)) = orig[i];
	  free (orig);
	}
    }
  else
    return rhs;

  /* If we got a simplified form, see if we need to convert its type.  */
  if (retval)
    {
      if (TREE_TYPE (retval) != TREE_TYPE (rhs))
	retval = convert (TREE_TYPE (rhs), retval);

      if (TREE_TYPE (retval) == TREE_TYPE (rhs))
	return retval;
    }

  /* No simplification was possible.  */
  return rhs;
}

/* Evaluate statement STMT.  */

static value
evaluate_stmt (tree stmt)
{
  value val;
  tree simplified;
  latticevalue likelyvalue = likely_value (stmt);

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    simplified = get_rhs (stmt);
  /* Otherwise the statement is likely to have an UNDEFINED value and
     there will be nothing to do.  */
  else
    simplified = NULL_TREE;

  if (simplified && is_gimple_min_invariant (simplified))
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.const_val = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had undefined operands, then the result of the statement should
	 be undefined.  Else the result of the statement is VARYING.  */
      val.lattice_val = (likelyvalue == UNDEFINED ? UNDEFINED : VARYING);
      val.const_val = NULL_TREE;
    }

  return val;
}

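/* Example: for 'y_2 = x_1 + 5' where x_1 has value <CONSTANT, 4>,
   likely_value returns CONSTANT, ccp_fold reduces the RHS to 9 and
   the result is <CONSTANT, 9>.  Had x_1 been UNDEFINED, no folding
   would be attempted and the result would stay UNDEFINED, keeping
   the evaluation optimistic.  */
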
/* Debugging dumps.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, value val)
{
  switch (val.lattice_val)
    {
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.const_val, dump_flags);
      break;
    default:
      abort ();
    }
}

/* Given a constant value VAL for bitfield FIELD, and a destination
   variable VAR, return VAL appropriately widened to fit into VAR.  If
   FIELD is wider than HOST_WIDE_INT, NULL is returned.  */

tree
widen_bitfield (tree val, tree field, tree var)
{
  unsigned var_size, field_size;
  tree wide_val;
  unsigned HOST_WIDE_INT mask;
  unsigned i;

  var_size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (var)));
  field_size = TREE_INT_CST_LOW (DECL_SIZE (field));

  /* Give up if either the bitfield or the variable are too wide.  */
  if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
    return NULL;

#if defined ENABLE_CHECKING
  if (var_size < field_size)
    abort ();
#endif

  /* If VAL is not an integer constant, then give up.  */
  if (TREE_CODE (val) != INTEGER_CST)
    return NULL;

  /* If the sign bit of the value is not set, or the field's type is
     unsigned, then just mask off the high order bits of the value.
     Note that the shifts must be done in unsigned HOST_WIDE_INT;
     a plain '1 << i' would overflow for bit positions >= 31.  */
  if ((TREE_INT_CST_LOW (val)
       & ((unsigned HOST_WIDE_INT) 1 << (field_size - 1))) == 0
      || DECL_UNSIGNED (field))
    {
      /* Zero extension.  Build a mask with the lower 'field_size' bits
	 set and a BIT_AND_EXPR node to clear the high order bits of
	 the value.  */
      for (i = 0, mask = 0; i < field_size; i++)
	mask |= (unsigned HOST_WIDE_INT) 1 << i;

      wide_val = build (BIT_AND_EXPR, TREE_TYPE (var), val,
			build_int_2 (mask, 0));
    }
  else
    {
      /* Sign extension.  Create a mask with the upper 'field_size'
	 bits set and a BIT_IOR_EXPR to set the high order bits of the
	 value.  */
      for (i = 0, mask = 0; i < (var_size - field_size); i++)
	mask |= (unsigned HOST_WIDE_INT) 1 << (var_size - i - 1);

      wide_val = build (BIT_IOR_EXPR, TREE_TYPE (var), val,
			build_int_2 (mask, 0));
    }

  return fold (wide_val);
}

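/* Worked example of the masks built above, widening an 8-bit signed
   bitfield into a 32-bit variable.  For VAL = 0xb0 the sign bit
   (bit 7) is set, so the sign-extension mask covers the upper
   32 - 8 = 24 bits, i.e. 0xffffff00, and the folded result is
   0xb0 | 0xffffff00 = 0xffffffb0.  For VAL = 0x30 the sign bit is
   clear, the zero-extension mask is 0xff, and the result is
   0x30 & 0xff = 0x30.  */
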
/* Function indicating whether we ought to include information for 'var'
   when calculating immediate uses.  */

static bool
need_imm_uses_for (tree var)
{
  return get_value (var)->lattice_val != VARYING;
}

/* Initialize local data structures and worklists for CCP.  */

static void
initialize (void)
{
  edge e;
  basic_block bb;
  sbitmap virtual_var;

  /* Worklist of SSA edges.  */
  VARRAY_TREE_INIT (ssa_edges, 20, "ssa_edges");

  executable_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (executable_blocks);

  bb_in_list = sbitmap_alloc (last_basic_block);
  sbitmap_zero (bb_in_list);

  value_vector = (value *) xmalloc (highest_ssa_version * sizeof (value));
  memset (value_vector, 0, highest_ssa_version * sizeof (value));

  /* 1 if ssa variable is used in a virtual variable context.  */
  virtual_var = sbitmap_alloc (highest_ssa_version);
  sbitmap_zero (virtual_var);

  /* Initialize default values and simulation flags for PHI nodes, statements
     and edges.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree stmt;
      stmt_ann_t ann;
      def_optype defs;
      vdef_optype vdefs;
      size_t x;
      int vary;

      /* Get the default value for each definition.  */
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  vary = 0;
	  stmt = bsi_stmt (i);
	  get_stmt_operands (stmt);
	  ann = stmt_ann (stmt);
	  defs = DEF_OPS (ann);
	  for (x = 0; x < NUM_DEFS (defs); x++)
	    {
	      tree def = DEF_OP (defs, x);
	      if (get_value (def)->lattice_val == VARYING)
		vary = 1;
	    }
	  DONT_SIMULATE_AGAIN (stmt) = vary;

	  /* Mark all VDEF operands VARYING.  */
	  vdefs = VDEF_OPS (ann);
	  for (x = 0; x < NUM_VDEFS (vdefs); x++)
	    {
	      tree res = VDEF_RESULT (vdefs, x);
	      get_value (res)->lattice_val = VARYING;
	      SET_BIT (virtual_var, SSA_NAME_VERSION (res));
	    }
	}

      for (e = bb->succ; e; e = e->succ_next)
	e->flags &= ~EDGE_EXECUTABLE;
    }

  /* Now process PHI nodes.  */
  FOR_EACH_BB (bb)
    {
      tree phi, var;
      int x;

      for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
	{
	  value *val;

	  val = get_value (PHI_RESULT (phi));
	  if (val->lattice_val != VARYING)
	    {
	      for (x = 0; x < PHI_NUM_ARGS (phi); x++)
		{
		  var = PHI_ARG_DEF (phi, x);

		  /* If one argument is virtual, the result is virtual, and
		     therefore varying.  */
		  if (TREE_CODE (var) == SSA_NAME)
		    {
		      if (TEST_BIT (virtual_var, SSA_NAME_VERSION (var)))
			{
			  val->lattice_val = VARYING;
			  SET_BIT (virtual_var,
				   SSA_NAME_VERSION (PHI_RESULT (phi)));
			  break;
			}
		    }
		}
	    }
	  DONT_SIMULATE_AGAIN (phi) = ((val->lattice_val == VARYING) ? 1 : 0);
	}
    }

  sbitmap_free (virtual_var);

  /* Compute immediate uses for variables we care about.  */
  compute_immediate_uses (TDFA_USE_OPS, need_imm_uses_for);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_immediate_uses (dump_file);

  VARRAY_BB_INIT (cfg_blocks, 20, "cfg_blocks");

  /* Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      if (e->dest != EXIT_BLOCK_PTR)
	{
	  e->flags |= EDGE_EXECUTABLE;
	  cfg_blocks_add (e->dest);
	}
    }
}

/* Free allocated storage.  */

static void
finalize (void)
{
  ssa_edges = NULL;
  cfg_blocks = NULL;
  free (value_vector);
  sbitmap_free (bb_in_list);
  sbitmap_free (executable_blocks);
  free_df ();
}

/* Is the block worklist empty?  */

static inline bool
cfg_blocks_empty_p (void)
{
  return (cfg_blocks_num == 0);
}

/* Add a basic block to the worklist.  */

static void
cfg_blocks_add (basic_block bb)
{
  if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
    return;

  if (TEST_BIT (bb_in_list, bb->index))
    return;

  if (cfg_blocks_empty_p ())
    {
      cfg_blocks_tail = cfg_blocks_head = 0;
      cfg_blocks_num = 1;
    }
  else
    {
      cfg_blocks_num++;
      if (cfg_blocks_num > VARRAY_SIZE (cfg_blocks))
	{
	  /* We have to grow the array now.  Adjust the queue to occupy the
	     full space of the original array.  */
	  cfg_blocks_tail = VARRAY_SIZE (cfg_blocks);
	  cfg_blocks_head = 0;
	  VARRAY_GROW (cfg_blocks, 2 * VARRAY_SIZE (cfg_blocks));
	}
      else
	cfg_blocks_tail = (cfg_blocks_tail + 1) % VARRAY_SIZE (cfg_blocks);
    }

  VARRAY_BB (cfg_blocks, cfg_blocks_tail) = bb;
  SET_BIT (bb_in_list, bb->index);
}

/* Remove a block from the worklist.  */

static basic_block
cfg_blocks_get (void)
{
  basic_block bb;

  bb = VARRAY_BB (cfg_blocks, cfg_blocks_head);

#ifdef ENABLE_CHECKING
  if (cfg_blocks_empty_p () || !bb)
    abort ();
#endif

  cfg_blocks_head = (cfg_blocks_head + 1) % VARRAY_SIZE (cfg_blocks);
  --cfg_blocks_num;
  RESET_BIT (bb_in_list, bb->index);

  return bb;
}

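/* The two routines above treat CFG_BLOCKS as a circular queue: with a
   four-entry array, head == 3 and tail == 1 describe the occupied
   slots 3, 0, 1 in dequeue order.  BB_IN_LIST prevents a block from
   being queued twice, so each block appears at most once no matter
   how many of its incoming edges become executable.  */
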
/* We have just defined a new value for VAR.  Add all immediate uses
   of VAR to the ssa_edges worklist.  */

static void
add_var_to_ssa_edges_worklist (tree var)
{
  tree stmt = SSA_NAME_DEF_STMT (var);
  dataflow_t df = get_immediate_uses (stmt);
  int num_uses = num_immediate_uses (df);
  int i;

  for (i = 0; i < num_uses; i++)
    {
      tree use = immediate_use (df, i);

      if (!DONT_SIMULATE_AGAIN (use))
	{
	  stmt_ann_t ann = stmt_ann (use);
	  if (ann->in_ccp_worklist == 0)
	    {
	      ann->in_ccp_worklist = 1;
	      VARRAY_PUSH_TREE (ssa_edges, use);
	    }
	}
    }
}

/* Set the lattice value for the variable VAR to VARYING.  */

static void
def_to_varying (tree var)
{
  value val;
  val.lattice_val = VARYING;
  val.const_val = NULL_TREE;
  set_lattice_value (var, val);
}

/* Set the lattice value for variable VAR to VAL.  */

static void
set_lattice_value (tree var, value val)
{
  value *old = get_value (var);

#ifdef ENABLE_CHECKING
  if (val.lattice_val == UNDEFINED)
    {
      /* CONSTANT->UNDEFINED is never a valid state transition.  */
      if (old->lattice_val == CONSTANT)
	abort ();

      /* VARYING->UNDEFINED is generally not a valid state transition,
	 except for values which are initialized to VARYING.  */
      if (old->lattice_val == VARYING
	  && get_default_value (var).lattice_val != VARYING)
	abort ();
    }
  else if (val.lattice_val == CONSTANT)
    {
      /* VARYING -> CONSTANT is an invalid state transition, except
	 for objects which start off in a VARYING state.  */
      if (old->lattice_val == VARYING
	  && get_default_value (var).lattice_val != VARYING)
	abort ();
    }
#endif

  /* If the constant for VAR has changed, then this VAR is really varying.  */
  if (old->lattice_val == CONSTANT
      && val.lattice_val == CONSTANT
      && !simple_cst_equal (old->const_val, val.const_val))
    {
      val.lattice_val = VARYING;
      val.const_val = NULL_TREE;
    }

  if (old->lattice_val != val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file,
			      "Lattice value changed to ", val);
	  fprintf (dump_file, ".  Adding definition to SSA edges.\n");
	}

      add_var_to_ssa_edges_worklist (var);
      *old = val;
    }
}

/* Replace USE references in statement STMT with their immediate reaching
   definition.  Return true if at least one reference was replaced.  If
   REPLACED_ADDRESSES_P is given, it will be set to true if an address
   constant was replaced.  */

static bool
replace_uses_in (tree stmt, bool *replaced_addresses_p)
{
  bool replaced = false;
  use_optype uses;
  size_t i;

  if (replaced_addresses_p)
    *replaced_addresses_p = false;

  get_stmt_operands (stmt);

  uses = STMT_USE_OPS (stmt);
  for (i = 0; i < NUM_USES (uses); i++)
    {
      tree *use = USE_OP_PTR (uses, i);
      value *val = get_value (*use);

      if (val->lattice_val == CONSTANT)
	{
	  *use = val->const_val;
	  replaced = true;
	  if (POINTER_TYPE_P (TREE_TYPE (*use)) && replaced_addresses_p)
	    *replaced_addresses_p = true;
	}
    }

  return replaced;
}

/* Return the likely latticevalue for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if any operands of STMT are undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static latticevalue
likely_value (tree stmt)
{
  use_optype uses;
  size_t i;
  int found_constant = 0;
  stmt_ann_t ann;

  /* If the statement makes aliased loads or has volatile operands, it
     won't fold to a constant value.  */
  ann = stmt_ann (stmt);
  if (ann->makes_aliased_loads || ann->has_volatile_ops)
    return VARYING;

  /* A CALL_EXPR is assumed to be varying.  This may be overly conservative,
     in the presence of const and pure calls.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return VARYING;

  get_stmt_operands (stmt);

  uses = USE_OPS (ann);
  for (i = 0; i < NUM_USES (uses); i++)
    {
      tree use = USE_OP (uses, i);
      value *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	return UNDEFINED;

      if (val->lattice_val == CONSTANT)
	found_constant = 1;
    }

  return ((found_constant || !uses) ? CONSTANT : VARYING);
}

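/* A single constant operand is enough to return CONSTANT above; the
   point, presumably, is to let the fold routines exploit algebraic
   identities.  Example: for 'z_4 = x_2 * y_3' with x_2 = <CONSTANT, 0>
   and y_3 VARYING, likely_value returns CONSTANT and ccp_fold may
   still reduce the RHS to 0.  */
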
/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;
  tree elt_size, min_idx, idx;
  tree array_type, elt_type;

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!lang_hooks.types_compatible_p (orig_type, elt_type))
    return NULL_TREE;

  /* Whee.  Ignore indexing of variable sized types.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (TREE_CODE (elt_size) != INTEGER_CST)
    return NULL_TREE;

  /* If the division isn't exact, then don't do anything.  Equally
     invalid as the above indexing of non-array variables.  */
  if (div_and_round_double (TRUNC_DIV_EXPR, 1,
			    TREE_INT_CST_LOW (offset),
			    TREE_INT_CST_HIGH (offset),
			    TREE_INT_CST_LOW (elt_size),
			    TREE_INT_CST_HIGH (elt_size),
			    &lquo, &hquo, &lrem, &hrem)
      || lrem || hrem)
    return NULL_TREE;
  idx = build_int_2_wide (lquo, hquo);

  /* Re-bias the index by the min index of the array type.  */
  min_idx = TYPE_DOMAIN (TREE_TYPE (base));
  if (min_idx)
    {
      min_idx = TYPE_MIN_VALUE (min_idx);
      if (min_idx)
	{
	  idx = convert (TREE_TYPE (min_idx), idx);
	  if (!integer_zerop (min_idx))
	    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 1);
	}
    }

  return build (ARRAY_REF, orig_type, base, idx);
}

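/* Worked example: folding the reference *(&a + 8) where 'a' is
   'int a[10]' on a target with 4-byte ints.  BASE is the array 'a',
   OFFSET is 8 and elt_size is 4; the division is exact with quotient
   2, so the result is the equivalent reference a[2].  */
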
/* A subroutine of fold_stmt_r.  Attempts to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
/* ??? This doesn't handle class inheritance.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (lang_hooks.types_compatible_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;
      if (TREE_CODE (DECL_FIELD_OFFSET (f)) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (DECL_FIELD_OFFSET (f), offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);
      if (cmp < 0)
	{
	  /* Don't care about offsets into the middle of scalars.  */
	  if (!AGGREGATE_TYPE_P (field_type))
	    continue;

	  /* Check for array at the end of the struct.  This is often
	     used for flexible array members.  We should be able to
	     turn this into an array access anyway.  */
	  if (TREE_CODE (field_type) == ARRAY_TYPE)
	    tail_array_field = f;

	  /* Check the end of the field against the offset.  */
	  if (!DECL_SIZE_UNIT (f)
	      || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	    continue;
	  t = int_const_binop (MINUS_EXPR, offset, DECL_FIELD_OFFSET (f), 1);
	  if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	    continue;

	  /* If we matched, then set offset to the displacement into
	     this field.  */
	  offset = t;
	}

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      else if (lang_hooks.types_compatible_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build (COMPONENT_REF, field_type, base, f);
	  return t;
	}

      /* Don't care about type-punning of scalars.  */
      else if (!AGGREGATE_TYPE_P (field_type))
	return NULL_TREE;

      goto found;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);

 found:
  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into it.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build (COMPONENT_REF, field_type, base, f);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}

/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a PLUS_EXPR.  */
  if (TREE_CODE (base) == PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Try folding *(&B+O) to B[X].  */
      t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr));
      if (t)
	return t;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset,
					      TREE_TYPE (expr), false);
      if (t)
	return t;

      /* Fold *&B to B.  */
      if (integer_zerop (offset))
	return base;
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we abort in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)),
						  base, offset,
						  TREE_TYPE (expr), true);
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}

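/* Example inputs for the routine above, assuming 4-byte ints and
   'struct S { int x; int y; }': the load *(&s + 4) goes down the
   ADDR_EXPR branch and becomes s.y, while *(p + 4) for a
   'struct S *p' goes down the POINTER_TYPE_P branch and becomes
   p->y.  */
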
/* A subroutine of fold_stmt_r.  EXPR is a PLUS_EXPR or MINUS_EXPR.

   A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where the cast doesn't actually exist, but is implicit in the
   type of the PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */

static tree
maybe_fold_stmt_addition (tree expr)
{
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  tree ptr_type = TREE_TYPE (expr);
  tree ptd_type;
  tree t;
  bool subtract = (TREE_CODE (expr) == MINUS_EXPR);

  /* We're only interested in pointer arithmetic.  */
  if (!POINTER_TYPE_P (ptr_type))
    return NULL_TREE;
  /* Canonicalize the integral operand to op1.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
    {
      if (subtract)
	return NULL_TREE;
      t = op0, op0 = op1, op1 = t;
    }
  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      array_idx = convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      /* If we're manipulating unsigned types, then folding into negative
	 values can produce incorrect results.  Particularly if the type
	 is smaller than the width of the pointer.  */
      if (subtract
	  && TYPE_UNSIGNED (TREE_TYPE (op1))
	  && tree_int_cst_lt (array_idx, op1))
	return NULL;
      op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
			     array_idx, op1, 0);
      subtract = false;
      op0 = array_obj;
    }

  /* If we weren't able to fold the subtraction into another array reference,
     canonicalize the integer for passing to the array and component ref
     simplification functions.  */
  if (subtract)
    {
      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
	return NULL;
      op1 = fold (build1 (NEGATE_EXPR, TREE_TYPE (op1), op1));
      /* ??? In theory fold should always produce another integer.  */
      if (TREE_CODE (op1) != INTEGER_CST)
	return NULL;
    }

  ptd_type = TREE_TYPE (ptr_type);

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
					    ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, ptr_type, t);

  return t;
}

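/* Example: for 'int a[10]', the address '&a + 8' (a PLUS_EXPR whose
   implicit result type is 'int *') is rewritten by the routine above
   to '&a[2]', which may in turn propagate into further dereferences
   and array folds.  */
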
/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  bool *changed_p = data;
  tree expr = *expr_p, t;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
				    integer_zero_node);
      break;

      /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF.
	 We'd only want to bother decomposing an existing ARRAY_REF if
	 the base array is found to have another offset contained within.
	 Otherwise we'd be wasting time.  */

    case ADDR_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Set TREE_INVARIANT properly so that the value is properly
	 considered constant, and so gets propagated as expected.  */
      if (*changed_p)
	recompute_tree_invarant_for_addr_expr (expr);
      return NULL_TREE;

    case PLUS_EXPR:
    case MINUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (expr);
      break;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on
	 the lhs.  In cases with IMA it is possible that it came
	 from another, equivalent type at this point.  We have
	 already checked the equivalence in this case.
	 Match on type plus offset, to allow for unnamed fields.
	 We won't necessarily get the corresponding field for
	 unions; this is believed to be harmless.  */

      if ((current_file_decl && TREE_CHAIN (current_file_decl))
	  && (DECL_FIELD_CONTEXT (TREE_OPERAND (expr, 1))
	      != TREE_TYPE (TREE_OPERAND (expr, 0))))
	{
	  tree f;
	  tree orig_field = TREE_OPERAND (expr, 1);
	  tree orig_type = TREE_TYPE (orig_field);
	  for (f = TYPE_FIELDS (TREE_TYPE (TREE_OPERAND (expr, 0)));
	       f; f = TREE_CHAIN (f))
	    {
	      if (lang_hooks.types_compatible_p (TREE_TYPE (f), orig_type)
		  && tree_int_cst_compare (DECL_FIELD_BIT_OFFSET (f),
					   DECL_FIELD_BIT_OFFSET (orig_field))
		     == 0
		  && tree_int_cst_compare (DECL_FIELD_OFFSET (f),
					   DECL_FIELD_OFFSET (orig_field))
		     == 0)
		{
		  TREE_OPERAND (expr, 1) = f;
		  break;
		}
	    }
	  /* Fall through is an error; it will be detected in tree-sra.  */
	}
      break;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}

/* Fold the statement pointed by STMT_P.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (tree *stmt_p)
{
  tree rhs, result, stmt;
  bool changed = false;

  stmt = *stmt_p;

  /* If we replaced constants and the statement makes pointer dereferences,
     then we may need to fold instances of *&VAR into VAR, etc.  */
  if (walk_tree (stmt_p, fold_stmt_r, &changed, NULL))
    {
      *stmt_p
	= build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
				    NULL);
      return true;
    }

  rhs = get_rhs (stmt);
  if (!rhs)
    return changed;
  result = NULL_TREE;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (TREE_CODE (rhs) == CALL_EXPR)
    {
      tree callee = get_callee_fndecl (rhs);
      if (callee && DECL_BUILT_IN (callee))
	result = ccp_fold_builtin (stmt, rhs);
    }

  /* If we couldn't fold the RHS, hand over to the generic fold routines.  */
  if (result == NULL_TREE)
    result = fold (rhs);

  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
     may have been added by fold, and "useless" type conversions that might
     now be apparent due to propagation.  */
  STRIP_MAIN_TYPE_NOPS (result);
  STRIP_USELESS_TYPE_CONVERSION (result);

  if (result != rhs)
    {
      changed = true;
      set_rhs (stmt_p, result);
    }

  return changed;
}

/* Get the main expression from statement STMT.  */

static tree
get_rhs (tree stmt)
{
  enum tree_code code = TREE_CODE (stmt);

  if (code == MODIFY_EXPR)
    return TREE_OPERAND (stmt, 1);
  else if (code == COND_EXPR)
    return COND_EXPR_COND (stmt);
  else if (code == SWITCH_EXPR)
    return SWITCH_COND (stmt);
  else if (code == RETURN_EXPR)
    {
      if (!TREE_OPERAND (stmt, 0))
	return NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
	return TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
      else
	return TREE_OPERAND (stmt, 0);
    }
  else if (code == GOTO_EXPR)
    return GOTO_DESTINATION (stmt);
  else if (code == LABEL_EXPR)
    return LABEL_EXPR_LABEL (stmt);
  else
    return stmt;
}

/* Set the main expression of *STMT_P to EXPR.  */

static void
set_rhs (tree *stmt_p, tree expr)
{
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  if (code == MODIFY_EXPR)
    TREE_OPERAND (stmt, 1) = expr;
  else if (code == COND_EXPR)
    COND_EXPR_COND (stmt) = expr;
  else if (code == SWITCH_EXPR)
    SWITCH_COND (stmt) = expr;
  else if (code == RETURN_EXPR)
    {
      if (TREE_OPERAND (stmt, 0)
	  && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
	TREE_OPERAND (TREE_OPERAND (stmt, 0), 1) = expr;
      else
	TREE_OPERAND (stmt, 0) = expr;
    }
  else if (code == GOTO_EXPR)
    GOTO_DESTINATION (stmt) = expr;
  else if (code == LABEL_EXPR)
    LABEL_EXPR_LABEL (stmt) = expr;
  else
    {
      /* Replace the whole statement with EXPR.  If EXPR has no side
	 effects, then replace *STMT_P with an empty statement.  */
      stmt_ann_t ann = stmt_ann (stmt);
      *stmt_p = TREE_SIDE_EFFECTS (expr) ? expr : build_empty_stmt ();
      (*stmt_p)->common.ann = (tree_ann) ann;

      if (TREE_SIDE_EFFECTS (expr))
	{
	  def_optype defs;
	  vdef_optype vdefs;
	  size_t i;

	  /* Fix all the SSA_NAMEs created by *STMT_P to point to its new
	     replacement.  */
	  defs = DEF_OPS (ann);
	  for (i = 0; i < NUM_DEFS (defs); i++)
	    {
	      tree var = DEF_OP (defs, i);
	      if (TREE_CODE (var) == SSA_NAME)
		SSA_NAME_DEF_STMT (var) = *stmt_p;
	    }

	  vdefs = VDEF_OPS (ann);
	  for (i = 0; i < NUM_VDEFS (vdefs); i++)
	    {
	      tree var = VDEF_RESULT (vdefs, i);
	      if (TREE_CODE (var) == SSA_NAME)
		SSA_NAME_DEF_STMT (var) = *stmt_p;
	    }
	}
    }
}


/* Return a default value for variable VAR using the following rules:

   1- Global and static variables are considered VARYING, unless they are
      declared 'const'.

   2- Function arguments are considered VARYING.

   3- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.  */

static value
get_default_value (tree var)
{
  value val;
  tree sym;

  if (TREE_CODE (var) == SSA_NAME)
    sym = SSA_NAME_VAR (var);
  else
    {
#ifdef ENABLE_CHECKING
      if (!DECL_P (var))
        abort ();
#endif
      sym = var;
    }

  val.lattice_val = UNDEFINED;
  val.const_val = NULL_TREE;

  if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
    {
      /* Function arguments and volatile variables are considered VARYING.  */
      val.lattice_val = VARYING;
    }
  else if (decl_function_context (sym) != current_function_decl
           || TREE_STATIC (sym))
    {
      /* Globals and static variables are considered VARYING, unless they
         are declared 'const'.  */
      val.lattice_val = VARYING;

      if (TREE_READONLY (sym)
          && DECL_INITIAL (sym)
          && is_gimple_min_invariant (DECL_INITIAL (sym)))
        {
          val.lattice_val = CONSTANT;
          val.const_val = DECL_INITIAL (sym);
        }
    }
  else
    {
      enum tree_code code;
      tree stmt = SSA_NAME_DEF_STMT (var);

      if (!IS_EMPTY_STMT (stmt))
        {
          code = TREE_CODE (stmt);
          if (code != MODIFY_EXPR && code != PHI_NODE)
            val.lattice_val = VARYING;
        }
    }

  return val;
}
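
/* For example, an SSA name for

     static const int limit = 8;

   starts out as CONSTANT 8 (rule 1), an SSA name for a PARM_DECL starts
   out as VARYING (rule 2), and an SSA name whose defining statement is
   still the empty statement starts out as UNDEFINED (rule 3).  */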

/* Fold builtin call FN in statement STMT.  If it cannot be folded into a
   constant, return NULL_TREE.  Otherwise, return its constant value.  */

static tree
ccp_fold_builtin (tree stmt, tree fn)
{
  tree result, strlen_val[2];
  tree arglist = TREE_OPERAND (fn, 1), a;
  tree callee = get_callee_fndecl (fn);
  bitmap visited;
  int strlen_arg, i;

  /* Ignore MD builtins.  */
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_builtin (fn);
  if (result)
    return result;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  if (!arglist)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  STRLEN_ARG
     is used as a bit mask below: bit N set means that the string length
     of argument N+1 should be computed into strlen_val[N].  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      strlen_arg = 1;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      strlen_arg = 2;
      break;
    default:
      return NULL_TREE;
    }

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_XMALLOC ();

  memset (strlen_val, 0, sizeof (strlen_val));
  for (i = 0, a = arglist;
       strlen_arg;
       i++, strlen_arg >>= 1, a = TREE_CHAIN (a))
    if (strlen_arg & 1)
      {
        bitmap_clear (visited);
        if (!get_strlen (TREE_VALUE (a), &strlen_val[i], visited))
          strlen_val[i] = NULL_TREE;
      }

  BITMAP_XFREE (visited);

  /* FIXME.  All this code looks dangerous in the sense that it might
     create non-gimple expressions.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      /* Convert from the internal "sizetype" type to "size_t".  */
      if (strlen_val[0]
          && size_type_node)
        {
          tree new = convert (size_type_node, strlen_val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we can not use the result.  */
          if (is_gimple_val (new)
              || (is_gimple_cast (new)
                  && is_gimple_val (TREE_OPERAND (new, 0))))
            return new;
          else
            return NULL_TREE;
        }
      return strlen_val[0];

    case BUILT_IN_STRCPY:
      if (strlen_val[1]
          && is_gimple_val (strlen_val[1]))
        return simplify_builtin_strcpy (arglist, strlen_val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (strlen_val[1]
          && is_gimple_val (strlen_val[1]))
        return simplify_builtin_strncpy (arglist, strlen_val[1]);
      break;

    case BUILT_IN_FPUTS:
      return simplify_builtin_fputs (arglist,
                                     TREE_CODE (stmt) != MODIFY_EXPR, 0,
                                     strlen_val[0]);

    case BUILT_IN_FPUTS_UNLOCKED:
      return simplify_builtin_fputs (arglist,
                                     TREE_CODE (stmt) != MODIFY_EXPR, 1,
                                     strlen_val[0]);

    default:
      abort ();
    }

  return NULL_TREE;
}
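
/* Example: a call 'strlen (p_1)' where get_strlen determines that p_1
   always points to the string "hello" folds to '(size_t) 5'.  For
   strcpy/strncpy the known length of the source string is handed to the
   simplify_builtin_* helpers in builtins.c, which may then, for
   instance, rewrite the copy as a fixed-size memcpy.  */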

/* Return the string length of ARG in LENGTH.  If ARG is an SSA name variable,
   follow its use-def chains.  If *LENGTH is not NULL_TREE and does not match
   the length we determine, or if we are unable to determine the length,
   return false.  VISITED is a bitmap of visited variables.  */

static bool
get_strlen (tree arg, tree *length, bitmap visited)
{
  tree var, def_stmt, val;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length && simple_cst_equal (val, *length) != 1)
        return false;

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (TREE_CODE (def_stmt))
    {
    case MODIFY_EXPR:
      {
        tree len, rhs;

        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        rhs = TREE_OPERAND (def_stmt, 1);
        STRIP_NOPS (rhs);
        if (TREE_CODE (rhs) == SSA_NAME)
          return get_strlen (rhs, length, visited);

        /* See if the RHS is a constant length.  */
        len = c_strlen (rhs, 1);
        if (len)
          {
            if (*length && simple_cst_equal (len, *length) != 1)
              return false;

            *length = len;
            return true;
          }

        break;
      }

    case PHI_NODE:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        int i;

        for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
          {
            tree arg = PHI_ARG_DEF (def_stmt, i);

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == PHI_RESULT (def_stmt))
              continue;

            if (!get_strlen (arg, length, visited))
              return false;
          }

        return true;
      }

    default:
      break;
    }

  return false;
}
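
/* Example:

     if (cond)
       p_1 = "foo";
     else
       p_2 = "bar";
     # p_3 = PHI <p_1, p_2>;

   get_strlen on p_3 succeeds with length 3 because both PHI arguments
   have the same constant length.  Had the strings been "foo" and "barx",
   the two lengths would disagree and get_strlen would return false.  */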

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static void
execute_fold_all_builtins (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree *stmtp = bsi_stmt_ptr (i);
          tree call = get_rhs (*stmtp);
          tree callee, result;

          if (!call || TREE_CODE (call) != CALL_EXPR)
            continue;
          callee = get_callee_fndecl (call);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;

          result = ccp_fold_builtin (*stmtp, call);
          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              default:
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_generic_stmt (dump_file, *stmtp, dump_flags);
            }

          set_rhs (stmtp, result);
          modify_stmt (*stmtp);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_generic_stmt (dump_file, *stmtp, dump_flags);
              fprintf (dump_file, "\n");
            }
        }
    }
}
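
/* Example: a statement 'n_2 = __builtin_constant_p (x_1)' that reaches
   this pass with x_1 still not known to be constant is rewritten to
   'n_2 = 0', while a call such as 'strlen ("abc")' is replaced by the
   constant 3 via ccp_fold_builtin.  */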

struct tree_opt_pass pass_fold_builtins =
{
  "fab",                                /* name */
  NULL,                                 /* gate */
  execute_fold_all_builtins,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa      /* todo_flags_finish */
};


#include "gt-tree-ssa-ccp.h"