/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED	->  This is the default starting value.  V_i
			    has not been processed yet.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation (see the loop example
	below).  If a variable V_i has an UNDEFINED value, it means
	that either its defining statement hasn't been visited yet or
	V_i has no defining statement, in which case the original
	symbol 'V' is being used uninitialized.  Since 'V' is a local
	variable, the compiler may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
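
   For illustration, these two short cuts are what keep the lattice
   optimistic around loop back edges.  In a fragment such as

		i_1 = 0;
	  L0:	# i_2 = PHI (i_1, i_3)
		i_3 = i_2 + 0;
		if (PRED) goto L0;

   the first visit to i_2 sees CONSTANT 0 coming from i_1, while the
   argument i_3 is either flowing through a not-yet-executable back
   edge or still UNDEFINED; in both cases it is ignored and i_2
   becomes CONSTANT 0.  That in turn evaluates i_3 to 0, so the later
   revisit of the PHI node meets 0 with 0 and i_2 remains CONSTANT 0.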

   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

	  struct A a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      never_executed ();
	  }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the V_MAY_DEF and V_MUST_DEF operands for each store.
   Additionally, since we also glob partial loads/stores with the base
   symbol, we also keep track of the memory reference where the
   constant value was stored (in the MEM_REF field of PROP_VALUE_T).
   For instance,

	# a_5 = V_MAY_DEF <a_4>
	a.a = 2;

	# VUSE <a_5>
	x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   To support STORE-CCP, it is necessary to add a new value to the
   constant propagation lattice.  When evaluating a load for a memory
   reference we can no longer assume a value of UNDEFINED if we
   haven't seen a preceding store to the same memory location.
   Consider, for instance global variables:

	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;
	  # A_5 = PHI (A_4, A_2);

	  # VUSE <A_5>
	  A.0_6 = A;

	  return A.0_6;
	}

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.  Therefore,
   when doing STORE-CCP, we introduce a fifth lattice value
   (UNKNOWN_VAL), which overrides any other value when computing the
   meet operation in PHI nodes.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "target.h"

/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED = 0,
  UNDEFINED,
  UNKNOWN_VAL,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a V_MAY_DEF or V_MUST_DEF), CONST_VAL[I].MEM_REF will
   contain the actual memory reference used to store (i.e., the LHS of
   the assignment doing the store).  */
static prop_value_t *const_val;

/* True if we are also propagating constants in stores and loads.  */
static bool do_store_ccp;

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case UNKNOWN_VAL:
      fprintf (outf, "%sUNKNOWN_VAL", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- If SSA_NAME_VALUE is set and it is a constant, its value is
      used.

   4- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   5- Variables that are not GIMPLE registers are considered
      UNKNOWN_VAL, which is really a stronger version of UNDEFINED.
      It's used to avoid the short circuit evaluation implied by
      UNDEFINED in ccp_lattice_meet.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };

  if (!do_store_ccp && !is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
	 non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if (SSA_NAME_VALUE (var)
	   && is_gimple_min_invariant (SSA_NAME_VALUE (var)))
    {
      val.lattice_val = CONSTANT;
      val.value = SSA_NAME_VALUE (var);
    }
  else if (TREE_STATIC (sym)
	   && TREE_READONLY (sym)
	   && DECL_INITIAL (sym)
	   && is_gimple_min_invariant (DECL_INITIAL (sym)))
    {
      /* Globals and static variables declared 'const' take their
	 initial value.  */
      val.lattice_val = CONSTANT;
      val.value = DECL_INITIAL (sym);
      val.mem_ref = sym;
    }
  else
    {
      tree stmt = SSA_NAME_DEF_STMT (var);

      if (IS_EMPTY_STMT (stmt))
	{
	  /* Variables defined by an empty statement are those used
	     before being initialized.  If VAR is a local variable, we
	     can assume initially that it is UNDEFINED.  If we are
	     doing STORE-CCP, function arguments and non-register
	     variables are initially UNKNOWN_VAL, because we cannot
	     discard the value incoming from outside of this function
	     (see ccp_lattice_meet for details).  */
	  if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	    val.lattice_val = UNDEFINED;
	  else if (do_store_ccp)
	    val.lattice_val = UNKNOWN_VAL;
	  else
	    val.lattice_val = VARYING;
	}
      else if (TREE_CODE (stmt) == MODIFY_EXPR
	       || TREE_CODE (stmt) == PHI_NODE)
	{
	  /* Any other variable defined by an assignment or a PHI node
	     is considered UNDEFINED (or UNKNOWN_VAL if VAR is not a
	     GIMPLE register).  */
	  val.lattice_val = is_gimple_reg (sym) ? UNDEFINED : UNKNOWN_VAL;
	}
      else
	{
	  /* Otherwise, VAR will never take on a constant value.  */
	  val.lattice_val = VARYING;
	}
    }

  return val;
}
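
/* For instance, in

	int
	foo (int i)
	{
	  int j;
	  return i + j;
	}

   the SSA name for the uninitialized local 'j' defaults to UNDEFINED
   because its defining statement is empty and 'j' is a GIMPLE
   register that is not a PARM_DECL (rule 2), while the default
   definition of the parameter 'i' defaults to VARYING under regular
   CCP (UNKNOWN_VAL under STORE-CCP), since we may not assume anything
   about the incoming argument.  */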

/* Get the constant value associated with variable VAR.  If
   MAY_USE_DEFAULT_P is true, call get_default_value on variables that
   have the lattice value UNINITIALIZED.  */

static prop_value_t *
get_value (tree var, bool may_use_default_p)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
  if (may_use_default_p && val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var, false);

  /* Lattice transitions must always be monotonically increasing in
     value.  We allow two exceptions:

     1- If *OLD_VAL and NEW_VAL are the same, return false to
	inform the caller that this was a non-transition.

     2- If we are doing store-ccp (i.e., DO_STORE_CCP is true),
	allow CONSTANT->UNKNOWN_VAL.  The UNKNOWN_VAL state is a
	special type of UNDEFINED state which prevents the short
	circuit evaluation of PHI arguments (see ccp_visit_phi_node
	and ccp_lattice_meet).  */
  gcc_assert (old_val->lattice_val <= new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && old_val->value == new_val.value
		  && old_val->mem_ref == new_val.mem_ref)
	      || (do_store_ccp
		  && old_val->lattice_val == CONSTANT
		  && new_val.lattice_val == UNKNOWN_VAL));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  %sdding SSA edges to worklist.\n",
		   new_val.lattice_val != UNDEFINED ? "A" : "Not a");
	}

      *old_val = new_val;

      /* Transitions UNINITIALIZED -> UNDEFINED are never interesting
	 for propagation purposes.  In these cases return false to
	 avoid doing useless work.  */
      return (new_val.lattice_val != UNDEFINED);
    }

  return false;
}
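
/* With the lattice ordered as UNINITIALIZED < UNDEFINED < UNKNOWN_VAL
   < CONSTANT < VARYING, a name may legally move UNDEFINED -> CONSTANT
   -> VARYING over the course of the simulation, but a transition such
   as CONSTANT -> UNDEFINED would trip the assertion above.  The only
   downward move ever allowed is CONSTANT -> UNKNOWN_VAL, and only
   when doing STORE-CCP.  */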

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if any operands of STMT are undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (tree stmt)
{
  bool found_constant;
  stmt_ann_t ann;
  tree use;
  ssa_op_iter iter;

  ann = stmt_ann (stmt);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (ann->has_volatile_ops)
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!do_store_ccp
      && (ann->makes_aliased_stores
	  || ann->makes_aliased_loads
	  || !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS)))
    return VARYING;

  /* A CALL_EXPR is assumed to be varying.  NOTE: This may be overly
     conservative, in the presence of const and pure calls.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return VARYING;

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (TREE_CODE (stmt) != MODIFY_EXPR
      && TREE_CODE (stmt) != COND_EXPR
      && TREE_CODE (stmt) != SWITCH_EXPR)
    return VARYING;

  found_constant = false;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use, true);

      if (val->lattice_val == VARYING)
	return VARYING;

      if (val->lattice_val == UNKNOWN_VAL)
	{
	  /* UNKNOWN_VAL is invalid when not doing STORE-CCP.  */
	  gcc_assert (do_store_ccp);
	  return UNKNOWN_VAL;
	}

      if (val->lattice_val == CONSTANT)
	found_constant = true;
    }

  if (found_constant
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE)
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_VUSE))
    return CONSTANT;

  return UNDEFINED;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = xmalloc (num_ssa_names * sizeof (*const_val));
  memset (const_val, 0, num_ssa_names * sizeof (*const_val));

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  bool is_varying = false;
	  tree stmt = bsi_stmt (i);

	  if (likely_value (stmt) == VARYING)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		get_value (def, false)->lattice_val = VARYING;

	      /* Never mark conditional jumps with DONT_SIMULATE_AGAIN,
		 otherwise the propagator will never add the outgoing
		 control edges.  */
	      if (TREE_CODE (stmt) != COND_EXPR
		  && TREE_CODE (stmt) != SWITCH_EXPR)
		is_varying = true;
	    }

	  DONT_SIMULATE_AGAIN (stmt) = is_varying;
	}
    }

  /* Now process PHI nodes.  */
  FOR_EACH_BB (bb)
    {
      tree phi;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  int i;
	  tree arg;
	  prop_value_t *val = get_value (PHI_RESULT (phi), false);

	  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
	    {
	      arg = PHI_ARG_DEF (phi, i);

	      if (TREE_CODE (arg) == SSA_NAME
		  && get_value (arg, false)->lattice_val == VARYING)
		{
		  val->lattice_val = VARYING;
		  break;
		}
	    }

	  DONT_SIMULATE_AGAIN (phi) = (val->lattice_val == VARYING);
	}
    }
}

/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.  */

static void
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  substitute_and_fold (const_val, false);

  free (const_val);
}

/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

	any  M UNDEFINED   = any
	any  M UNKNOWN_VAL = UNKNOWN_VAL
	any  M VARYING     = VARYING
	Ci   M Cj          = Ci       if (i == j)
	Ci   M Cj          = VARYING  if (i != j)

   Lattice values UNKNOWN_VAL and UNDEFINED are similar but have
   different semantics at PHI nodes.  Both values imply that we don't
   know whether the variable is constant or not.  However, UNKNOWN_VAL
   values override all others.  For instance, suppose that A is a
   global variable:

		+------+
		|      |
		|     / \
		|    /   \
		|   |     A_1 = 4
		|    \   /
		|     \ /
		|   A_3 = PHI (A_2, A_1)
		|   ... = A_3
		|      |
		+------+

   If the edge into A_2 is not executable, the first visit to A_3 will
   yield the constant 4.  But the second visit to A_3 will be with A_2
   in state UNKNOWN_VAL.  We can no longer conclude that A_3 is 4
   because A_2 may have been set in another function.  If we had used
   the lattice value UNDEFINED, we would have wrongly concluded that
   A_3 is 4.  */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any.  */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any.
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == UNKNOWN_VAL
	   || val2->lattice_val == UNKNOWN_VAL)
    {
      /* UNKNOWN_VAL values are invalid if we are not doing STORE-CCP.  */
      gcc_assert (do_store_ccp);

      /* any M UNKNOWN_VAL = UNKNOWN_VAL.  */
      val1->lattice_val = UNKNOWN_VAL;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1
	   && (!do_store_ccp
	       || simple_cst_equal (val1->mem_ref, val2->mem_ref) == 1))
    {
      /* Ci M Cj = Ci	        if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      val1->value = val1->value;
      val1->mem_ref = val1->mem_ref;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
}

/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (tree phi)
{
  int i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_generic_expr (dump_file, phi, dump_flags);
    }

  old_val = get_value (PHI_RESULT (phi), false);
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNKNOWN_VAL:
      /* To avoid the default value of UNKNOWN_VAL overriding
	 that of its possible constant arguments, temporarily
	 set the PHI node's default lattice value to be
	 UNDEFINED.  If the PHI node's old value was UNKNOWN_VAL and
	 the new value is UNDEFINED, then we prevent the invalid
	 transition by not calling set_lattice_value.  */
      gcc_assert (do_store_ccp);

      /* FALLTHRU */

    case UNDEFINED:
    case UNINITIALIZED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      new_val.mem_ref = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = PHI_ARG_EDGE (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = PHI_ARG_DEF (phi, i);
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	      arg_val.mem_ref = NULL_TREE;
	    }
	  else
	    arg_val = *(get_value (arg, true));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Check for an invalid change from UNKNOWN_VAL to UNDEFINED.  */
  if (do_store_ccp
      && old_val->lattice_val == UNKNOWN_VAL
      && new_val.lattice_val == UNDEFINED)
    return SSA_PROP_NOT_INTERESTING;

  /* Otherwise, make the transition to the new value.  */
  if (set_lattice_value (PHI_RESULT (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS.  */

static tree
ccp_fold (tree stmt)
{
  tree rhs = get_rhs (stmt);
  enum tree_code code = TREE_CODE (rhs);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree retval = NULL_TREE;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* If the RHS is an SSA_NAME, return its known constant value,
	 if any.  */
      return get_value (rhs, true)->value;
    }
  else if (do_store_ccp && stmt_makes_single_load (stmt))
    {
      /* If the RHS is a memory load, see if the VUSEs associated with
	 it are a valid constant for that memory load.  */
      prop_value_t *val = get_value_loaded_by (stmt, const_val);
      if (val && simple_cst_equal (val->mem_ref, rhs) == 1)
	return val->value;
      else
	return NULL_TREE;
    }

  /* Unary operators.  Note that we know the single operand must
     be a constant.  So this should almost always return a
     simplified RHS.  */
  if (kind == tcc_unary)
    {
      /* Handle unary operators which can appear in GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);

      /* Simplify the operand down to a constant.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  prop_value_t *val = get_value (op0, true);
	  if (val->lattice_val == CONSTANT)
	    op0 = get_value (op0, true)->value;
	}

      return fold_unary (code, TREE_TYPE (rhs), op0);
    }

  /* Binary and comparison operators.  We know one or both of the
     operands are constants.  */
  else if (kind == tcc_binary
	   || kind == tcc_comparison
	   || code == TRUTH_AND_EXPR
	   || code == TRUTH_OR_EXPR
	   || code == TRUTH_XOR_EXPR)
    {
      /* Handle binary and comparison operators that can appear in
	 GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);
      tree op1 = TREE_OPERAND (rhs, 1);

      /* Simplify the operands down to constants when appropriate.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  prop_value_t *val = get_value (op0, true);
	  if (val->lattice_val == CONSTANT)
	    op0 = val->value;
	}

      if (TREE_CODE (op1) == SSA_NAME)
	{
	  prop_value_t *val = get_value (op1, true);
	  if (val->lattice_val == CONSTANT)
	    op1 = val->value;
	}

      return fold_binary (code, TREE_TYPE (rhs), op0, op1);
    }

  /* We may be able to fold away calls to builtin functions if their
     arguments are constants.  */
  else if (code == CALL_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
	   && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
	       == FUNCTION_DECL)
	   && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
    {
      if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
	{
	  tree *orig, var;
	  tree fndecl, arglist;
	  size_t i = 0;
	  ssa_op_iter iter;
	  use_operand_p var_p;

	  /* Preserve the original values of every operand.  */
	  orig = xmalloc (sizeof (tree) * NUM_SSA_OPERANDS (stmt, SSA_OP_USE));
	  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
	    orig[i++] = var;

	  /* Substitute operands with their values and try to fold.  */
	  replace_uses_in (stmt, NULL, const_val);
	  fndecl = get_callee_fndecl (rhs);
	  arglist = TREE_OPERAND (rhs, 1);
	  retval = fold_builtin (fndecl, arglist, false);

	  /* Restore operands to their original form.  */
	  i = 0;
	  FOR_EACH_SSA_USE_OPERAND (var_p, stmt, iter, SSA_OP_USE)
	    SET_USE (var_p, orig[i++]);
	  free (orig);
	}
    }
  else
    return rhs;

  /* If we got a simplified form, see if we need to convert its type.  */
  if (retval)
    return fold_convert (TREE_TYPE (rhs), retval);

  /* No simplification was possible.  */
  return rhs;
}
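
/* As a small example of the binary operator case above, given

	y_1 = 4;
	x_2 = y_1 + 3;

   once y_1 has lattice value CONSTANT 4, ccp_fold substitutes the
   constant into the RHS of the second statement and fold_binary
   reduces '4 + 3' to the constant 7, which becomes the value
   associated with x_2.  */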

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

static tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx, true))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  return cval;
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  return cval;
      break;

    default:
      break;
    }

  return NULL_TREE;
}
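
/* For example, with

	static const int a[3] = { 10, 20, 30 };
	...
	x_2 = a[i_1];

   if i_1 has lattice value CONSTANT 1, the ARRAY_REF case above finds
   the CONSTRUCTOR for 'a' in its DECL_INITIAL, resolves the index
   through the lattice, and returns the element 20.  */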

/* Evaluate statement STMT.  */

static prop_value_t
evaluate_stmt (tree stmt)
{
  prop_value_t val;
  tree simplified;
  ccp_lattice_t likelyvalue = likely_value (stmt);

  val.mem_ref = NULL_TREE;

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    simplified = get_rhs (stmt);
  /* If the statement is an ARRAY_REF or COMPONENT_REF into constant
     aggregates, extract the referenced constant.  Otherwise the
     statement is likely to have an UNDEFINED value, and there will be
     nothing to do.  Note that fold_const_aggregate_ref returns
     NULL_TREE if the first case does not match.  */
  else
    simplified = fold_const_aggregate_ref (get_rhs (stmt));

  if (simplified && is_gimple_min_invariant (simplified))
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      val.lattice_val = (likelyvalue == UNDEFINED) ? UNDEFINED : VARYING;
      val.value = NULL_TREE;
    }

  return val;
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).  */

static enum ssa_prop_result
visit_assignment (tree stmt, tree *output_p)
{
  prop_value_t val;
  tree lhs, rhs;
  enum ssa_prop_result retval;

  lhs = TREE_OPERAND (stmt, 0);
  rhs = TREE_OPERAND (stmt, 1);

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* For a simple copy operation, we copy the lattice values.  */
      prop_value_t *nval = get_value (rhs, true);
      val = *nval;
    }
  else if (do_store_ccp && stmt_makes_single_load (stmt))
    {
      /* Same as above, but the RHS is not a gimple register and yet
	 has a known VUSE.  If STMT is loading from the same memory
	 location that created the SSA_NAMEs for the virtual operands,
	 we can propagate the value on the RHS.  */
      prop_value_t *nval = get_value_loaded_by (stmt, const_val);

      if (nval && simple_cst_equal (nval->mem_ref, rhs) == 1)
	val = *nval;
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement.  */
    val = evaluate_stmt (stmt);

  /* If the original LHS was a VIEW_CONVERT_EXPR, modify the constant
     value to be a VIEW_CONVERT_EXPR of the old constant value.

     ??? Also, if this was a definition of a bitfield, we need to widen
     the constant value into the type of the destination variable.  This
     should not be necessary if GCC represented bitfields properly.  */
  {
    tree orig_lhs = TREE_OPERAND (stmt, 0);

    if (TREE_CODE (orig_lhs) == VIEW_CONVERT_EXPR
	&& val.lattice_val == CONSTANT)
      {
	tree w = fold_build1 (VIEW_CONVERT_EXPR,
			      TREE_TYPE (TREE_OPERAND (orig_lhs, 0)),
			      val.value);

	orig_lhs = TREE_OPERAND (orig_lhs, 0);
	if (w && is_gimple_min_invariant (w))
	  val.value = w;
	else
	  {
	    val.lattice_val = VARYING;
	    val.value = NULL;
	  }
      }

    if (val.lattice_val == CONSTANT
	&& TREE_CODE (orig_lhs) == COMPONENT_REF
	&& DECL_BIT_FIELD (TREE_OPERAND (orig_lhs, 1)))
      {
	tree w = widen_bitfield (val.value, TREE_OPERAND (orig_lhs, 1),
				 orig_lhs);

	if (w && is_gimple_min_invariant (w))
	  val.value = w;
	else
	  {
	    val.lattice_val = VARYING;
	    val.value = NULL_TREE;
	    val.mem_ref = NULL_TREE;
	  }
      }
  }

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }
  else if (do_store_ccp && stmt_makes_single_store (stmt))
    {
      /* Otherwise, set the names in V_MAY_DEF/V_MUST_DEF operands
	 to the new constant value and mark the LHS as the memory
	 reference associated with VAL.  */
      ssa_op_iter i;
      tree vdef;
      bool changed;

      /* Stores cannot take on an UNDEFINED value.  */
      if (val.lattice_val == UNDEFINED)
	val.lattice_val = UNKNOWN_VAL;

      /* Mark VAL as stored in the LHS of this assignment.  */
      val.mem_ref = lhs;

      /* Set the value of every VDEF to VAL.  */
      changed = false;
      FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
	changed |= set_lattice_value (vdef, val);

      /* Note that for propagation purposes, we are only interested in
	 visiting statements that load the exact same memory reference
	 stored here.  Those statements will have the exact same list
	 of virtual uses, so it is enough to set the output of this
	 statement to be its first virtual definition.  */
      *output_p = first_vdef (stmt);
      if (changed)
	{
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (tree stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = bb_for_stmt (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
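
/* For instance, if x_1 is known to be CONSTANT 15, the predicate in

	if (x_1 > 10) goto L1; else goto L2;

   evaluates to boolean true and find_taken_edge returns the edge to
   L1, so only that edge is added to the simulation worklist.  */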

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_generic_stmt (dump_file, stmt, dump_flags);
      fprintf (dump_file, "\n");
    }

  if (TREE_CODE (stmt) == MODIFY_EXPR)
    {
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);
    }
  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
    {
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      return visit_cond_stmt (stmt, taken_edge_p);
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}

/* Main entry point for SSA Conditional Constant Propagation.  */

static void
execute_ssa_ccp (bool store_ccp)
{
  do_store_ccp = store_ccp;
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  ccp_finalize ();
}


static void
do_ssa_ccp (void)
{
  execute_ssa_ccp (false);
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}

struct tree_opt_pass pass_ccp =
{
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_cleanup_cfg | TODO_dump_func | TODO_update_ssa
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts,		/* todo_flags_finish */
  0					/* letter */
};

static void
do_ssa_store_ccp (void)
{
  /* If STORE-CCP is not enabled, we just run regular CCP.  */
  execute_ssa_ccp (flag_tree_store_ccp != 0);
}


static bool
gate_store_ccp (void)
{
  /* STORE-CCP is enabled only with -ftree-store-ccp, but when
     -fno-tree-store-ccp is specified, we should run regular CCP.
     That's why the pass is enabled with either flag.  */
  return flag_tree_store_ccp != 0 || flag_tree_ccp != 0;
}


struct tree_opt_pass pass_store_ccp =
{
  "store_ccp",				/* name */
  gate_store_ccp,			/* gate */
  do_ssa_store_ccp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_STORE_CCP,			/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_update_ssa
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_cleanup_cfg
    | TODO_verify_stmts,		/* todo_flags_finish */
  0					/* letter */
};

/* Given a constant value VAL for bitfield FIELD, and a destination
   variable VAR, return VAL appropriately widened to fit into VAR.  If
   FIELD is wider than HOST_WIDE_INT, NULL is returned.  */

tree
widen_bitfield (tree val, tree field, tree var)
{
  unsigned HOST_WIDE_INT var_size, field_size;
  tree wide_val;
  unsigned HOST_WIDE_INT mask;
  unsigned int i;

  /* We can only do this if the size of the type and field and VAL are
     all constants representable in HOST_WIDE_INT.  */
  if (!host_integerp (TYPE_SIZE (TREE_TYPE (var)), 1)
      || !host_integerp (DECL_SIZE (field), 1)
      || !host_integerp (val, 0))
    return NULL_TREE;

  var_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1);
  field_size = tree_low_cst (DECL_SIZE (field), 1);

  /* Give up if either the bitfield or the variable is too wide.  */
  if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

  gcc_assert (var_size >= field_size);

  /* If the sign bit of the value is not set or the field's type is unsigned,
     just mask off the high order bits of the value.  */
  if (DECL_UNSIGNED (field)
      || !(tree_low_cst (val, 0) & (((HOST_WIDE_INT)1) << (field_size - 1))))
    {
      /* Zero extension.  Build a mask with the lower 'field_size' bits
	 set and a BIT_AND_EXPR node to clear the high order bits of
	 the value.  */
      for (i = 0, mask = 0; i < field_size; i++)
	mask |= ((HOST_WIDE_INT) 1) << i;

      wide_val = build2 (BIT_AND_EXPR, TREE_TYPE (var), val,
			 build_int_cst (TREE_TYPE (var), mask));
    }
  else
    {
      /* Sign extension.  Create a mask with the upper 'field_size'
	 bits set and a BIT_IOR_EXPR to set the high order bits of the
	 value.  */
      for (i = 0, mask = 0; i < (var_size - field_size); i++)
	mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);

      wide_val = build2 (BIT_IOR_EXPR, TREE_TYPE (var), val,
			 build_int_cst (TREE_TYPE (var), mask));
    }

  return fold (wide_val);
}
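
/* A worked example: for a signed 3-bit bitfield stored in an 8-bit
   variable, a stored value of 5 (binary 101) has its sign bit set, so
   the sign extension branch builds the mask 0xf8 (the upper 5 bits)
   and returns VAL | 0xf8, i.e. 0xfd, which is -3 in 8 bits.  A stored
   value of 2 (binary 010) takes the zero extension branch instead and
   is masked with 0x07, leaving 2.  */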

/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
{
  tree min_idx, idx, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!lang_hooks.types_compatible_p (orig_type, elt_type))
    return NULL_TREE;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = integer_zero_node;
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;

      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 1,
				   TREE_INT_CST_LOW (offset),
				   TREE_INT_CST_HIGH (offset),
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (NULL_TREE, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = integer_zero_node;
  if (TYPE_DOMAIN (array_type))
    {
      if (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
	min_idx = TYPE_MIN_VALUE (TYPE_DOMAIN (array_type));
      else
	min_idx = fold_convert (TYPE_DOMAIN (array_type), min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      idx = fold_convert (TYPE_DOMAIN (array_type), idx);
      elt_offset = fold_convert (TYPE_DOMAIN (array_type), elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  return build (ARRAY_REF, orig_type, base, idx, min_idx,
		size_int (tree_low_cst (elt_size, 1)
			  / (TYPE_ALIGN_UNIT (elt_type))));
}
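
/* For example, given 'int a[10]' with 4-byte ints, this folds
   *(&a[2] + 8) into a[4]: the ARRAY_REF contributes an element offset
   of 2, the byte offset 8 divides exactly into an index of 2, and the
   two are summed.  A byte offset that does not divide evenly by the
   element size is rejected.  */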

/* A subroutine of fold_stmt_r.  Attempts to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
/* ??? This doesn't handle class inheritance.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (lang_hooks.types_compatible_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && lang_hooks.types_compatible_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      offset = t;
      goto found;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

 found:
  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}

/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a PLUS_EXPR.  */
  if (TREE_CODE (base) == PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
	  && is_gimple_min_invariant (DECL_INITIAL (base)))
	return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B[X].  */
      t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr));
      if (t)
	return t;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset,
					      TREE_TYPE (expr), false);
      if (t)
	return t;

      /* Fold *&B to B.  We can only do this if EXPR is the same type
	 as BASE.  We can't do this if EXPR is the element type of an array
	 and BASE is the array.  */
      if (integer_zerop (offset)
	  && lang_hooks.types_compatible_p (TREE_TYPE (base),
					    TREE_TYPE (expr)))
	return base;
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we die in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)),
						  base, offset,
						  TREE_TYPE (expr), true);
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}

/* A subroutine of fold_stmt_r.  EXPR is a PLUS_EXPR.

   A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where the cast doesn't actually exist, but is implicit in the
   type of the PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */

static tree
maybe_fold_stmt_addition (tree expr)
{
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  tree ptr_type = TREE_TYPE (expr);
  tree ptd_type;
  tree t;
  bool subtract = (TREE_CODE (expr) == MINUS_EXPR);

  /* We're only interested in pointer arithmetic.  */
  if (!POINTER_TYPE_P (ptr_type))
    return NULL_TREE;
  /* Canonicalize the integral operand to op1.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
    {
      if (subtract)
	return NULL_TREE;
      t = op0, op0 = op1, op1 = t;
    }
  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      /* If we're manipulating unsigned types, then folding into negative
	 values can produce incorrect results.  Particularly if the type
	 is smaller than the width of the pointer.  */
      if (subtract
	  && TYPE_UNSIGNED (TREE_TYPE (op1))
	  && tree_int_cst_lt (array_idx, op1))
	return NULL;
      op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
			     array_idx, op1, 0);
      subtract = false;
      op0 = array_obj;
    }

  /* If we weren't able to fold the subtraction into another array reference,
     canonicalize the integer for passing to the array and component ref
     simplification functions.  */
  if (subtract)
    {
      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
	return NULL;
      op1 = fold_build1 (NEGATE_EXPR, TREE_TYPE (op1), op1);
      /* ??? In theory fold should always produce another integer.  */
      if (TREE_CODE (op1) != INTEGER_CST)
	return NULL;
    }

  ptd_type = TREE_TYPE (ptr_type);

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
					    ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, ptr_type, t);

  return t;
}
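
/* For example, given 'int a[10]' with 4-byte ints, the PLUS_EXPR
   (int *) (&a + 4) becomes &a[1]: the byte offset 4 is handed to
   maybe_fold_offset_to_array_ref, which turns it into an index, and
   the result is wrapped back into an ADDR_EXPR of the pointer type.  */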

/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  bool *changed_p = data;
  tree expr = *expr_p, t;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
				    integer_zero_node);
      break;

      /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF.
	 We'd only want to bother decomposing an existing ARRAY_REF if
	 the base array is found to have another offset contained within.
	 Otherwise we'd be wasting time.  */

    case ADDR_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Set TREE_INVARIANT properly so that the value is properly
	 considered constant, and so gets propagated as expected.  */
      if (*changed_p)
	recompute_tree_invarant_for_addr_expr (expr);
      return NULL_TREE;

    case PLUS_EXPR:
    case MINUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (expr);
      break;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
	 We've already checked that the records are compatible, so we should
	 come up with a set of compatible fields.  */
      {
	tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
	tree expr_field = TREE_OPERAND (expr, 1);

	if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
	  {
	    expr_field = find_compatible_field (expr_record, expr_field);
	    TREE_OPERAND (expr, 1) = expr_field;
	  }
      }
      break;

    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.  If ARG is an SSA name variable, follow its use-def
   chains.  TYPE is 0 if the exact string length should be returned,
   1 for the maximum string length, and 2 for the maximum value ARG can
   have.  Return false if we are unable to determine the length or
   value, or, for TYPE == 0, if *LENGTH is already set and differs from
   the length we determine.  VISITED is a bitmap of SSA names already
   visited, used to break cycles in the use-def chains.  */
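/* For example (illustrative): with TYPE == 0, a constant argument
   "hello" yields length 5 via c_strlen, while with TYPE == 2 an
   INTEGER_CST argument is simply taken as its own maximum value.  */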
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, def_stmt, val;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (TREE_CODE (def_stmt))
    {
    case MODIFY_EXPR:
      {
	tree rhs;

	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	rhs = TREE_OPERAND (def_stmt, 1);
	STRIP_NOPS (rhs);
	return get_maxval_strlen (rhs, length, visited, type);
      }

    case PHI_NODE:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	int i;

	for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
	  {
	    tree arg = PHI_ARG_DEF (def_stmt, i);

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == PHI_RESULT (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }

	return true;
      }

    default:
      break;
    }

  return false;
}
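/* PHI example for get_maxval_strlen above (illustrative):

	if (cond)
	  s_1 = "foo";
	else
	  s_2 = "fubar";
	s_3 = PHI <s_1, s_2>

   With TYPE == 1 the maximum string length of s_3 is 5; with TYPE == 0
   the exact lengths 3 and 5 disagree, so the query fails.  */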
/* Fold builtin call FN in statement STMT.  If it cannot be folded into a
   constant, return NULL_TREE.  Otherwise, return its constant value.  */
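/* For instance (illustrative): if CCP has determined that s_2 holds
   the constant string "hello", a statement x_3 = strlen (s_2) can be
   folded to x_3 = 5 here.  */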
static tree
ccp_fold_builtin (tree stmt, tree fn)
{
  tree result, val[3];
  tree callee, arglist, a;
  int arg_mask, i, type;
  bitmap visited;
  bool ignore;

  ignore = TREE_CODE (stmt) != MODIFY_EXPR;

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  callee = get_callee_fndecl (fn);
  arglist = TREE_OPERAND (fn, 1);
  result = fold_builtin (callee, arglist, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  if (!arglist)
    return NULL_TREE;
  /* Limit the work to builtins we know how to simplify.  ARG_MASK is a
     bitmask of the arguments to inspect: bit I set means we need the
     length or value of argument I.  TYPE selects what to compute for
     those arguments (see get_maxval_strlen).  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_mask = 1;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_mask = 2;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_mask = 4;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_mask = 2;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_mask = 2;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }
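  /* For example (illustrative): for __builtin_strncpy_chk (dst, src,
     len, os) we have ARG_MASK == 4, selecting the third argument LEN,
     and TYPE == 2, requesting the maximum value LEN can take.  */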
  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);

  memset (val, 0, sizeof (val));
  for (i = 0, a = arglist;
       arg_mask;
       i++, arg_mask >>= 1, a = TREE_CHAIN (a))
    if (arg_mask & 1)
      {
	bitmap_clear (visited);
	if (!get_maxval_strlen (TREE_VALUE (a), &val[i], visited, type))
	  val[i] = NULL_TREE;
      }

  BITMAP_FREE (visited);
  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0])
	{
	  tree new = fold_convert (TREE_TYPE (fn), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new)
	      || (is_gimple_cast (new)
		  && is_gimple_val (TREE_OPERAND (new, 0))))
	    return new;
	}
      break;
    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]))
	result = fold_builtin_strcpy (callee, arglist, val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]))
	result = fold_builtin_strncpy (callee, arglist, val[1]);
      break;

    case BUILT_IN_FPUTS:
      result = fold_builtin_fputs (arglist,
				   TREE_CODE (stmt) != MODIFY_EXPR, 0,
				   val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      result = fold_builtin_fputs (arglist,
				   TREE_CODE (stmt) != MODIFY_EXPR, 1,
				   val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]))
	result = fold_builtin_memory_chk (callee, arglist, val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = fold_builtin_stxcpy_chk (callee, arglist, val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]))
	result = fold_builtin_strncpy_chk (arglist, val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = fold_builtin_snprintf_chk (arglist, val[1],
					    DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Fold the statement pointed to by STMT_P.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */
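/* For instance (illustrative): if propagation has placed the address of
   a declaration in the object slot of an OBJ_TYPE_REF call, e.g. a C++
   virtual call on an object of known static type, the call below may be
   resolved to a direct call via the language hook.  */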
bool
fold_stmt (tree *stmt_p)
{
  tree rhs, result, stmt;
  bool changed = false;

  stmt = *stmt_p;

  /* If we replaced constants and the statement makes pointer dereferences,
     then we may need to fold instances of *&VAR into VAR, etc.  */
  if (walk_tree (stmt_p, fold_stmt_r, &changed, NULL))
    {
      *stmt_p
	= build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
				    NULL);
      return true;
    }

  rhs = get_rhs (stmt);
  if (!rhs)
    return changed;
  result = NULL_TREE;

  if (TREE_CODE (rhs) == CALL_EXPR)
    {
      tree callee;

      /* Check for builtins that CCP can handle using information not
	 available in the generic fold routines.  */
      callee = get_callee_fndecl (rhs);
      if (callee && DECL_BUILT_IN (callee))
	result = ccp_fold_builtin (stmt, rhs);
      else
	{
	  /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
	     here are when we've propagated the address of a decl into the
	     object slot.  */
	  /* ??? Should perhaps do this in fold proper.  However, doing it
	     there requires that we create a new CALL_EXPR, and that requires
	     copying EH region info to the new node.  Easier to just do it
	     here where we can just smash the call operand.  */
	  callee = TREE_OPERAND (rhs, 0);
	  if (TREE_CODE (callee) == OBJ_TYPE_REF
	      && lang_hooks.fold_obj_type_ref
	      && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
	      && DECL_P (TREE_OPERAND
			 (OBJ_TYPE_REF_OBJECT (callee), 0)))
	    {
	      tree t;

	      /* ??? Caution: Broken ADDR_EXPR semantics means that
		 looking at the type of the operand of the addr_expr
		 can yield an array type.  See silly exception in
		 check_pointer_types_r.  */

	      t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
	      t = lang_hooks.fold_obj_type_ref (callee, t);
	      if (t)
		{
		  TREE_OPERAND (rhs, 0) = t;
		  changed = true;
		}
	    }
	}
    }

  /* If we couldn't fold the RHS, hand over to the generic fold routines.  */
  if (result == NULL_TREE)
    result = fold (rhs);

  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
     may have been added by fold, and "useless" type conversions that might
     now be apparent due to propagation.  */
  STRIP_USELESS_TYPE_CONVERSION (result);

  if (result != rhs)
    changed |= set_rhs (stmt_p, result);

  return changed;
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  */
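/* For example (illustrative): after propagation turns *p_1 into *&x,
   this routine rewrites the operand to plain x in place, without
   allocating a new statement as fold_stmt above may do.  */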
bool
fold_stmt_inplace (tree stmt)
{
  tree old_stmt = stmt, rhs, new_rhs;
  bool changed = false;

  walk_tree (&stmt, fold_stmt_r, &changed, NULL);
  gcc_assert (stmt == old_stmt);

  rhs = get_rhs (stmt);
  if (!rhs || rhs == stmt)
    return changed;

  new_rhs = fold (rhs);
  STRIP_USELESS_TYPE_CONVERSION (new_rhs);
  if (new_rhs == rhs)
    return changed;

  changed |= set_rhs (&stmt, new_rhs);
  gcc_assert (stmt == old_stmt);

  return changed;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  */
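/* Sketch of the effect (illustrative): if a builtin folds to a compound
   expression such as "memcpy (d, s, 4), d + 3", which is not a valid
   GIMPLE right-hand side, the expression is gimplified into a fresh
   temporary whose defining statements are emitted before *SI_P, and the
   temporary is returned for use as the new RHS.  */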
static tree
convert_to_gimple_builtin (block_stmt_iterator *si_p, tree expr)
{
  tree_stmt_iterator ti;
  tree stmt = bsi_stmt (*si_p);
  tree tmp, stmts = NULL;

  push_gimplify_context ();
  tmp = get_initialized_tmp_var (expr, &stmts, NULL);
  pop_gimplify_context (NULL);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&stmts, EXPR_LOCATION (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (ti = tsi_start (stmts); !tsi_end_p (ti); tsi_next (&ti))
    {
      tree new_stmt = tsi_stmt (ti);
      find_new_referenced_vars (tsi_stmt_ptr (ti));
      bsi_insert_before (si_p, new_stmt, BSI_NEW_STMT);
      mark_new_vars_to_rename (bsi_stmt (*si_p));
      bsi_next (si_p);
    }

  return tmp;
}
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */
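/* For example (illustrative): any __builtin_constant_p (x_5) call that
   CCP could not fold to 1 is resolved to 0 here, so that code guarded
   by it can be simplified away later.  */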
static void
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      for (i = bsi_start (bb); !bsi_end_p (i); )
	{
	  tree *stmtp = bsi_stmt_ptr (i);
	  tree old_stmt = *stmtp;
	  tree call = get_rhs (*stmtp);
	  tree callee, result;
	  enum built_in_function fcode;

	  if (!call || TREE_CODE (call) != CALL_EXPR)
	    {
	      bsi_next (&i);
	      continue;
	    }
	  callee = get_callee_fndecl (call);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      bsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = ccp_fold_builtin (*stmtp, call);
	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      default:
		bsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_generic_stmt (dump_file, *stmtp, dump_flags);
	    }

	  if (!set_rhs (stmtp, result))
	    {
	      result = convert_to_gimple_builtin (&i, result);
	      if (result)
		{
		  bool ok = set_rhs (stmtp, result);

		  gcc_assert (ok);
		}
	    }
	  update_stmt (*stmtp);
	  if (maybe_clean_or_replace_eh_stmt (old_stmt, *stmtp)
	      && tree_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_generic_stmt (dump_file, *stmtp, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  /* Retry the same statement if it changed into another
	     builtin; there might be new folding opportunities now.  */
	  call = get_rhs (*stmtp);
	  if (!call || TREE_CODE (call) != CALL_EXPR)
	    {
	      bsi_next (&i);
	      continue;
	    }
	  callee = get_callee_fndecl (call);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    bsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    cleanup_tree_cfg ();
}
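/* Example of the retry above (illustrative): if a __strcpy_chk call
   folds to a plain strcpy call, the function code changes, so the loop
   visits the statement again and the new strcpy can in turn be
   folded.  */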
struct tree_opt_pass pass_fold_builtins =
{
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa,			/* todo_flags_finish */
  0					/* letter */
};