/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED	->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

		if (PRED)
		  a_9 = 3;
		else
		  a_10 = 100;
		a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.


   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

	  struct A a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      never_executed ();
	  }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

	# a_5 = VDEF <a_4>
	a.a = 2;

	# VUSE <a_5>
	x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;
	  # A_5 = PHI (A_4, A_2);

	  # VUSE <A_5>
	  A.0_6 = A;

	  return A.0_6;
	}

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
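
/* For illustration (this example is not part of the original comment),
   consider how regular CCP transforms a trivial function.  Given:

	int foo (void)
	{
	  int x = 4;
	  int y = x + 5;
	  return y;
	}

   visiting 'x_1 = 4' sets x_1 to CONSTANT 4, which queues the uses of
   x_1; visiting 'y_2 = x_1 + 5' then folds the RHS to 9 and sets y_2
   to CONSTANT 9.  The final substitute_and_fold walk rewrites the
   body into 'return 9;'.  */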

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "target.h"

/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;
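
/* Note that the enumerators are declared in increasing lattice order,
   which set_lattice_value below relies on: a name may only ever move
   to a numerically larger state, e.g.

	UNDEFINED -> CONSTANT -> VARYING

   (skipping states is fine; going back is not).  */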

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

/* True if we are also propagating constants in stores and loads.  */
static bool do_store_ccp;

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}

/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* The regular is_gimple_min_invariant does a shallow test of the object.
   It assumes that full gimplification has happened, or will happen on the
   object.  For a value coming from DECL_INITIAL, this is not true, so we
   have to be more strict ourselves.  */

static bool
ccp_decl_initial_min_invariant (tree t)
{
  if (!is_gimple_min_invariant (t))
    return false;
  if (TREE_CODE (t) == ADDR_EXPR)
    {
      /* Inline and unroll is_gimple_addressable.  */
      while (1)
	{
	  t = TREE_OPERAND (t, 0);
	  if (is_gimple_id (t))
	    return true;
	  if (!handled_component_p (t))
	    return false;
	}
    }
  return true;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

static tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym)
      && !MTAG_P (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val
	  && ccp_decl_initial_min_invariant (val))
	return val;
    }

  return NULL_TREE;
}
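
/* For example (illustrative only), given

	static const int B = 42;

   get_symbol_constant_value (B) returns the INTEGER_CST 42, since B
   is TREE_STATIC, TREE_READONLY and its DECL_INITIAL is a minimal
   invariant.  A non-const global yields NULL_TREE.  */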

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- If SSA_NAME_VALUE is set and it is a constant, its value is
      used.

   4- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   5- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };
  tree cst_val;

  if (!do_store_ccp && !is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
	 non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if (SSA_NAME_VALUE (var)
	   && is_gimple_min_invariant (SSA_NAME_VALUE (var)))
    {
      val.lattice_val = CONSTANT;
      val.value = SSA_NAME_VALUE (var);
    }
  else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
    {
      /* Globals and static variables declared 'const' take their
	 initial value.  */
      val.lattice_val = CONSTANT;
      val.value = cst_val;
      val.mem_ref = sym;
    }
  else
    {
      tree stmt = SSA_NAME_DEF_STMT (var);

      if (IS_EMPTY_STMT (stmt))
	{
	  /* Variables defined by an empty statement are those used
	     before being initialized.  If VAR is a local variable, we
	     can assume initially that it is UNDEFINED, otherwise we must
	     consider it VARYING.  */
	  if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	    val.lattice_val = UNDEFINED;
	  else
	    val.lattice_val = VARYING;
	}
      else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	       || TREE_CODE (stmt) == PHI_NODE)
	{
	  /* Any other variable defined by an assignment or a PHI node
	     is considered UNDEFINED.  */
	  val.lattice_val = UNDEFINED;
	}
      else
	{
	  /* Otherwise, VAR will never take on a constant value.  */
	  val.lattice_val = VARYING;
	}
    }

  return val;
}

/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mem_ref = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
     This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

     and we set value of y to NaN.  This causes value of x to be set to NaN.
     When we later determine that y is in fact VARYING, fold uses the fact
     that HONOR_NANS is false, and we try to change the value of x to 0,
     causing an ICE.  With HONOR_NANS being false, the real appearance of
     NaN would cause undefined behavior, though, so claiming that y (and x)
     are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      val->mem_ref = NULL;
      return;
    }
}

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))
		  && old_val->mem_ref == new_val.mem_ref));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}
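
/* An illustrative transition sequence accepted by set_lattice_value:

	UNDEFINED -> CONSTANT 4 -> VARYING

   is monotonically increasing and therefore valid, whereas trying to
   move a name from CONSTANT 4 to CONSTANT 5, or from VARYING back to
   CONSTANT, would trip the assertion above.  */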

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if any operands of STMT are undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (tree stmt)
{
  bool has_constant_operand;
  stmt_ann_t ann;
  tree use;
  ssa_op_iter iter;

  ann = stmt_ann (stmt);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (ann->has_volatile_ops)
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!do_store_ccp
      && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return VARYING;

  /* A CALL_EXPR is assumed to be varying.  NOTE: This may be overly
     conservative, in the presence of const and pure calls.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return VARYING;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
      && !(TREE_CODE (stmt) == RETURN_EXPR && get_rhs (stmt) != NULL_TREE)
      && TREE_CODE (stmt) != COND_EXPR
      && TREE_CODE (stmt) != SWITCH_EXPR)
    return VARYING;

  if (is_gimple_min_invariant (get_rhs (stmt)))
    return CONSTANT;

  has_constant_operand = false;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	return UNDEFINED;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  if (has_constant_operand
      /* We do not consider virtual operands here -- load from read-only
	 memory may have only VARYING virtual operands, but still be
	 constant.  */
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
    return CONSTANT;

  return VARYING;
}
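
/* A few illustrative cases (examples only, not from the original
   comments):

	x_1 = 4;	  likely CONSTANT: the RHS is a minimal invariant.
	y_2 = x_1 + z_3;  UNDEFINED as soon as one operand is UNDEFINED;
			  CONSTANT if neither is and at least one is
			  CONSTANT.
	*p_4 = 5;	  VARYING when DO_STORE_CCP is false (virtual
			  operands are present).
	z_5 = foo ();	  VARYING: calls are conservatively not folded.  */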

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (tree stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (stmt_ann (stmt)->has_volatile_ops)
    return true;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    {
      if (!do_store_ccp)
	return true;

      /* We can only handle simple loads and stores.  */
      if (!stmt_makes_single_load (stmt)
	  && !stmt_makes_single_store (stmt))
	return true;
    }

  /* If it contains a call, it is varying.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
      && !(TREE_CODE (stmt) == RETURN_EXPR && get_rhs (stmt) != NULL_TREE)
      && TREE_CODE (stmt) != COND_EXPR
      && TREE_CODE (stmt) != SWITCH_EXPR)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree stmt = bsi_stmt (i);
	  bool is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }

	  DONT_SIMULATE_AGAIN (stmt) = is_varying;
	}
    }

  /* Now process PHI nodes.  We never set DONT_SIMULATE_AGAIN on PHI nodes,
     since we do not know which edges are executable yet, except for
     phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      tree phi;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  if (!do_store_ccp && !is_gimple_reg (PHI_RESULT (phi)))
	    DONT_SIMULATE_AGAIN (phi) = true;
	  else
	    DONT_SIMULATE_AGAIN (phi) = false;
	}
    }
}

/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.  */

static void
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  substitute_and_fold (const_val, false);

  free (const_val);
}

/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj          = Ci         if (i == j)
		Ci   M Cj          = VARYING    if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1
	   && (!do_store_ccp
	       || (val1->mem_ref && val2->mem_ref
		   && operand_equal_p (val1->mem_ref, val2->mem_ref, 0))))
    {
      /* Ci M Cj = Ci	       if (i == j)
	 Ci M Cj = VARYING     if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  VAL1 already holds
	 the constant value and memory reference to keep.  */
      val1->lattice_val = CONSTANT;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
}
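
/* Worked examples of the meet operator (illustrative only):

	UNDEFINED  M CONSTANT 4   = CONSTANT 4
	CONSTANT 4 M CONSTANT 4   = CONSTANT 4
	CONSTANT 3 M CONSTANT 4   = VARYING
	CONSTANT 4 M VARYING      = VARYING

   With DO_STORE_CCP, two equal constants additionally meet to VARYING
   unless they were stored through the same memory reference.  */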

/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (tree phi)
{
  int i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_generic_expr (dump_file, phi, dump_flags);
    }

  old_val = get_value (PHI_RESULT (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      new_val.mem_ref = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = PHI_ARG_EDGE (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = PHI_ARG_DEF (phi, i);
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	      arg_val.mem_ref = NULL_TREE;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (PHI_RESULT (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS.  */

static tree
ccp_fold (tree stmt)
{
  tree rhs = get_rhs (stmt);
  enum tree_code code = TREE_CODE (rhs);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree retval = NULL_TREE;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* If the RHS is an SSA_NAME, return its known constant value,
	 if any.  */
      return get_value (rhs)->value;
    }
  else if (do_store_ccp && stmt_makes_single_load (stmt))
    {
      /* If the RHS is a memory load, see if the VUSEs associated with
	 it are a valid constant for that memory load.  */
      prop_value_t *val = get_value_loaded_by (stmt, const_val);
      if (val && val->mem_ref)
	{
	  if (operand_equal_p (val->mem_ref, rhs, 0))
	    return val->value;

	  /* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR of a
	     complex type with a known constant value, return it.  */
	  if ((TREE_CODE (rhs) == REALPART_EXPR
	       || TREE_CODE (rhs) == IMAGPART_EXPR)
	      && operand_equal_p (val->mem_ref, TREE_OPERAND (rhs, 0), 0))
	    return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs), val->value);
	}
      return NULL_TREE;
    }

  /* Unary operators.  Note that we know the single operand must
     be a constant.  So this should almost always return a
     simplified RHS.  */
  if (kind == tcc_unary)
    {
      /* Handle unary operators which can appear in GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);

      /* Simplify the operand down to a constant.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  prop_value_t *val = get_value (op0);
	  if (val->lattice_val == CONSTANT)
	    op0 = get_value (op0)->value;
	}

      if ((code == NOP_EXPR || code == CONVERT_EXPR)
	  && tree_ssa_useless_type_conversion_1 (TREE_TYPE (rhs),
						 TREE_TYPE (op0)))
	return op0;
      return fold_unary (code, TREE_TYPE (rhs), op0);
    }

  /* Binary and comparison operators.  We know one or both of the
     operands are constants.  */
  else if (kind == tcc_binary
	   || kind == tcc_comparison
	   || code == TRUTH_AND_EXPR
	   || code == TRUTH_OR_EXPR
	   || code == TRUTH_XOR_EXPR)
    {
      /* Handle binary and comparison operators that can appear in
	 GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);
      tree op1 = TREE_OPERAND (rhs, 1);

      /* Simplify the operands down to constants when appropriate.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  prop_value_t *val = get_value (op0);
	  if (val->lattice_val == CONSTANT)
	    op0 = val->value;
	}

      if (TREE_CODE (op1) == SSA_NAME)
	{
	  prop_value_t *val = get_value (op1);
	  if (val->lattice_val == CONSTANT)
	    op1 = val->value;
	}

      return fold_binary (code, TREE_TYPE (rhs), op0, op1);
    }

  /* We may be able to fold away calls to builtin functions if their
     arguments are constants.  */
  else if (code == CALL_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
	   && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
	       == FUNCTION_DECL)
	   && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
    {
      if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
	{
	  tree *orig, var;
	  tree fndecl, arglist;
	  size_t i = 0;
	  ssa_op_iter iter;
	  use_operand_p var_p;

	  /* Preserve the original values of every operand.  */
	  orig = XNEWVEC (tree, NUM_SSA_OPERANDS (stmt, SSA_OP_USE));
	  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
	    orig[i++] = var;

	  /* Substitute operands with their values and try to fold.  */
	  replace_uses_in (stmt, NULL, const_val);
	  fndecl = get_callee_fndecl (rhs);
	  arglist = TREE_OPERAND (rhs, 1);
	  retval = fold_builtin (fndecl, arglist, false);

	  /* Restore operands to their original form.  */
	  i = 0;
	  FOR_EACH_SSA_USE_OPERAND (var_p, stmt, iter, SSA_OP_USE)
	    SET_USE (var_p, orig[i++]);
	  free (orig);
	}
    }
  else
    return rhs;

  /* If we got a simplified form, see if we need to convert its type.  */
  if (retval)
    return fold_convert (TREE_TYPE (rhs), retval);

  /* No simplification was possible.  */
  return rhs;
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

static tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst (TREE_TYPE (t), (TREE_STRING_POINTER (ctor)
						  [TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  return cval;
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  return cval;
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
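
/* For instance (illustrative only), with

	static const int a[3] = { 10, 20, 30 };

   a load from a[2], or from a[i_7] where i_7 has lattice value
   CONSTANT 2, folds to 30: the VAR_DECL is read-only, binds locally,
   and its DECL_INITIAL is a static CONSTRUCTOR whose element at
   index 2 is the INTEGER_CST 30.  */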

/* Evaluate statement STMT.  */

static prop_value_t
evaluate_stmt (tree stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);

  val.mem_ref = NULL_TREE;

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  if (likelyvalue == VARYING)
    simplified = get_rhs (stmt);
  /* If the statement is an ARRAY_REF or COMPONENT_REF into constant
     aggregates, extract the referenced constant.  Otherwise the
     statement is likely to have an UNDEFINED value, and there will be
     nothing to do.  Note that fold_const_aggregate_ref returns
     NULL_TREE if the first case does not match.  */
  else if (!simplified)
    simplified = fold_const_aggregate_ref (get_rhs (stmt));

  if (simplified && is_gimple_min_invariant (simplified))
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).  */

static enum ssa_prop_result
visit_assignment (tree stmt, tree *output_p)
{
  prop_value_t val;
  tree lhs, rhs;
  enum ssa_prop_result retval;

  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
  rhs = GIMPLE_STMT_OPERAND (stmt, 1);

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* For a simple copy operation, we copy the lattice values.  */
      prop_value_t *nval = get_value (rhs);
      val = *nval;
    }
  else if (do_store_ccp && stmt_makes_single_load (stmt))
    {
      /* Same as above, but the RHS is not a gimple register and yet
	 has a known VUSE.  If STMT is loading from the same memory
	 location that created the SSA_NAMEs for the virtual operands,
	 we can propagate the value on the RHS.  */
      prop_value_t *nval = get_value_loaded_by (stmt, const_val);

      if (nval
	  && nval->mem_ref
	  && operand_equal_p (nval->mem_ref, rhs, 0))
	val = *nval;
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement.  */
    val = evaluate_stmt (stmt);

  /* If the original LHS was a VIEW_CONVERT_EXPR, modify the constant
     value to be a VIEW_CONVERT_EXPR of the old constant value.

     ??? Also, if this was a definition of a bitfield, we need to widen
     the constant value into the type of the destination variable.  This
     should not be necessary if GCC represented bitfields properly.  */
  {
    tree orig_lhs = GIMPLE_STMT_OPERAND (stmt, 0);

    if (TREE_CODE (orig_lhs) == VIEW_CONVERT_EXPR
	&& val.lattice_val == CONSTANT)
      {
	tree w = fold_unary (VIEW_CONVERT_EXPR,
			     TREE_TYPE (TREE_OPERAND (orig_lhs, 0)),
			     val.value);

	orig_lhs = TREE_OPERAND (orig_lhs, 0);
	if (w && is_gimple_min_invariant (w))
	  val.value = w;
	else
	  {
	    val.lattice_val = VARYING;
	    val.value = NULL;
	  }
      }

    if (val.lattice_val == CONSTANT
	&& TREE_CODE (orig_lhs) == COMPONENT_REF
	&& DECL_BIT_FIELD (TREE_OPERAND (orig_lhs, 1)))
      {
	tree w = widen_bitfield (val.value, TREE_OPERAND (orig_lhs, 1),
				 orig_lhs);

	if (w && is_gimple_min_invariant (w))
	  val.value = w;
	else
	  {
	    val.lattice_val = VARYING;
	    val.value = NULL_TREE;
	    val.mem_ref = NULL_TREE;
	  }
      }
  }

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }
  else if (do_store_ccp && stmt_makes_single_store (stmt))
    {
      /* Otherwise, set the names in VDEF operands to the new
	 constant value and mark the LHS as the memory reference
	 associated with VAL.  */
      ssa_op_iter i;
      tree vdef;
      bool changed;

      /* Mark VAL as stored in the LHS of this assignment.  */
      if (val.lattice_val == CONSTANT)
	val.mem_ref = lhs;

      /* Set the value of every VDEF to VAL.  */
      changed = false;
      FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
	{
	  /* See PR 29801.  We may have VDEFs for read-only variables
	     (see the handling of unmodifiable variables in
	     add_virtual_operand); do not attempt to change their value.  */
	  if (get_symbol_constant_value (SSA_NAME_VAR (vdef)) != NULL_TREE)
	    continue;

	  changed |= set_lattice_value (vdef, val);
	}

      /* Note that for propagation purposes, we are only interested in
	 visiting statements that load the exact same memory reference
	 stored here.  Those statements will have the exact same list
	 of virtual uses, so it is enough to set the output of this
	 statement to be its first virtual definition.  */
      *output_p = first_vdef (stmt);
      if (changed)
	{
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (tree stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = bb_for_stmt (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_generic_stmt (dump_file, stmt, dump_flags);
      fprintf (dump_file, "\n");
    }

  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
    {
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);
    }
  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
    {
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      return visit_cond_stmt (stmt, taken_edge_p);
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}

/* Main entry point for SSA Conditional Constant Propagation.  */

static void
execute_ssa_ccp (bool store_ccp)
{
  do_store_ccp = store_ccp;
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  ccp_finalize ();
}


static unsigned int
do_ssa_ccp (void)
{
  execute_ssa_ccp (false);
  return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct tree_opt_pass pass_ccp =
{
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_cleanup_cfg
    | TODO_dump_func
    | TODO_update_ssa
    | TODO_ggc_collect
    | TODO_verify_ssa
    | TODO_verify_stmts
    | TODO_update_smt_usage,		/* todo_flags_finish */
  0					/* letter */
};


static unsigned int
do_ssa_store_ccp (void)
{
  /* If STORE-CCP is not enabled, we just run regular CCP.  */
  execute_ssa_ccp (flag_tree_store_ccp != 0);
  return 0;
}


static bool
gate_store_ccp (void)
{
  /* STORE-CCP is enabled only with -ftree-store-ccp, but when
     -fno-tree-store-ccp is specified, we should run regular CCP.
     That's why the pass is enabled with either flag.  */
  return flag_tree_store_ccp != 0 || flag_tree_ccp != 0;
}


struct tree_opt_pass pass_store_ccp =
{
  "store_ccp",				/* name */
  gate_store_ccp,			/* gate */
  do_ssa_store_ccp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_STORE_CCP,			/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_update_ssa
    | TODO_ggc_collect
    | TODO_verify_ssa
    | TODO_cleanup_cfg
    | TODO_verify_stmts
    | TODO_update_smt_usage,		/* todo_flags_finish */
  0					/* letter */
};

/* Given a constant value VAL for bitfield FIELD, and a destination
   variable VAR, return VAL appropriately widened to fit into VAR.  If
   FIELD is wider than HOST_WIDE_INT, NULL is returned.  */

tree
widen_bitfield (tree val, tree field, tree var)
{
  unsigned HOST_WIDE_INT var_size, field_size;
  tree wide_val;
  unsigned HOST_WIDE_INT mask;
  unsigned int i;

  /* We can only do this if the size of the type and field and VAL are
     all constants representable in HOST_WIDE_INT.  */
  if (!host_integerp (TYPE_SIZE (TREE_TYPE (var)), 1)
      || !host_integerp (DECL_SIZE (field), 1)
      || !host_integerp (val, 0))
    return NULL_TREE;

  var_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1);
  field_size = tree_low_cst (DECL_SIZE (field), 1);

  /* Give up if either the bitfield or the variable are too wide.  */
  if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

  gcc_assert (var_size >= field_size);

  /* If the sign bit of the value is not set or the field's type is unsigned,
     just mask off the high order bits of the value.  */
  if (DECL_UNSIGNED (field)
      || !(tree_low_cst (val, 0) & (((HOST_WIDE_INT)1) << (field_size - 1))))
    {
      /* Zero extension.  Build a mask with the lower 'field_size' bits
	 set and a BIT_AND_EXPR node to clear the high order bits of
	 the value.  */
      for (i = 0, mask = 0; i < field_size; i++)
	mask |= ((HOST_WIDE_INT) 1) << i;

      wide_val = fold_build2 (BIT_AND_EXPR, TREE_TYPE (var), val,
			      build_int_cst (TREE_TYPE (var), mask));
    }
  else
    {
      /* Sign extension.  Create a mask with the upper
	 'var_size - field_size' bits set and a BIT_IOR_EXPR to set
	 the high order bits of the value.  */
      for (i = 0, mask = 0; i < (var_size - field_size); i++)
	mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);

      wide_val = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (var), val,
			      build_int_cst (TREE_TYPE (var), mask));
    }

  return wide_val;
}
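
/* A worked example (illustrative only): widening the 4-bit signed
   bitfield pattern 1010 (value -6) into an 8-bit variable.  The sign
   bit (bit 3) is set, so the mask gets the upper
   var_size - field_size = 4 bits, i.e. 0xF0, and the result is
   0x0A | 0xF0 = 0xFA, which is -6 in 8-bit two's complement.  An
   unsigned field would instead be masked with the lower four bits,
   0x0F.  */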

/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
{
  tree min_idx, idx, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that
     ARRAY_REF.  We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!lang_hooks.types_compatible_p (orig_type, elt_type))
    return NULL_TREE;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = integer_zero_node;
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;

      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 1,
				   TREE_INT_CST_LOW (offset),
				   TREE_INT_CST_HIGH (offset),
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (NULL_TREE, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = integer_zero_node;
  if (TYPE_DOMAIN (array_type))
    {
      if (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
	min_idx = TYPE_MIN_VALUE (TYPE_DOMAIN (array_type));
      else
	min_idx = fold_convert (TYPE_DOMAIN (array_type), min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      idx = fold_convert (TYPE_DOMAIN (array_type), idx);
      elt_offset = fold_convert (TYPE_DOMAIN (array_type), elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  return build4 (ARRAY_REF, orig_type, base, idx, min_idx,
		 size_int (tree_low_cst (elt_size, 1)
			   / (TYPE_ALIGN_UNIT (elt_type))));
}
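
/* Example (illustrative only): for 'int a[10]', folding the
   dereference *(&a[1] + 8) picks up the existing index 1 from the
   ARRAY_REF, divides the byte offset 8 by the element size 4 to get
   2, and produces a[3].  Had the division left a remainder, nothing
   would be folded.  */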

/* A subroutine of fold_stmt_r.  Attempts to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
/* ??? This doesn't handle class inheritance.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (lang_hooks.types_compatible_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && lang_hooks.types_compatible_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      offset = t;
      goto found;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

 found:
  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}

/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a PLUS_EXPR.  */
  if (TREE_CODE (base) == PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
	  && ccp_decl_initial_min_invariant (DECL_INITIAL (base)))
	return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B[X].  */
      t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr));
      if (t)
	return t;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset,
					      TREE_TYPE (expr), false);
      if (t)
	return t;

      /* Fold *&B to B.  We can only do this if EXPR is the same type
	 as BASE.  We can't do this if EXPR is the element type of an array
	 and BASE is the array.  */
      if (integer_zerop (offset)
	  && lang_hooks.types_compatible_p (TREE_TYPE (base),
					    TREE_TYPE (expr)))
	return base;
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we die in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)),
						  base, offset,
						  TREE_TYPE (expr), true);
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}

/* A subroutine of fold_stmt_r.  EXPR is a PLUS_EXPR or MINUS_EXPR.

   A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where the cast doesn't actually exist, but is implicit in the
   type of the PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */

static tree
maybe_fold_stmt_addition (tree expr)
{
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  tree ptr_type = TREE_TYPE (expr);
  tree ptd_type;
  tree t;
  bool subtract = (TREE_CODE (expr) == MINUS_EXPR);

  /* We're only interested in pointer arithmetic.  */
  if (!POINTER_TYPE_P (ptr_type))
    return NULL_TREE;
  /* Canonicalize the integral operand to op1.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
    {
      if (subtract)
	return NULL_TREE;
      t = op0, op0 = op1, op1 = t;
    }
  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      /* If we're manipulating unsigned types, then folding into negative
	 values can produce incorrect results.  Particularly if the type
	 is smaller than the width of the pointer.  */
      if (subtract
	  && TYPE_UNSIGNED (TREE_TYPE (op1))
	  && tree_int_cst_lt (array_idx, op1))
	return NULL;
      op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
			     array_idx, op1, 0);
      subtract = false;
      op0 = array_obj;
    }

  /* If we weren't able to fold the subtraction into another array reference,
     canonicalize the integer for passing to the array and component ref
     simplification functions.  */
  if (subtract)
    {
      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
	return NULL;
      op1 = fold_unary (NEGATE_EXPR, TREE_TYPE (op1), op1);
      /* ??? In theory fold should always produce another integer.  */
      if (op1 == NULL || TREE_CODE (op1) != INTEGER_CST)
	return NULL;
    }

  ptd_type = TREE_TYPE (ptr_type);

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
					    ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, ptr_type, t);

  return t;
}
/* For passing state through walk_tree into fold_stmt_r and its
   children.  */

struct fold_stmt_r_data
{
  bool *changed_p;
  bool *inside_addr_expr_p;
};
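/* INSIDE_ADDR_EXPR_P is set while walking inside an ADDR_EXPR; there a
   constant array reference must not be folded to the value it reads,
   since it is the address, not the contents, that is wanted.  */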
/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  struct fold_stmt_r_data *fold_stmt_r_data = (struct fold_stmt_r_data *) data;
  bool *inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
  bool *changed_p = fold_stmt_r_data->changed_p;
  tree expr = *expr_p, t;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
				    integer_zero_node);
      break;

      /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
	 We'd only want to bother decomposing an existing ARRAY_REF if
	 the base array is found to have another offset contained within.
	 Otherwise we'd be wasting time.  */
    case ARRAY_REF:
      /* If we are not processing expressions found within an
	 ADDR_EXPR, then we can fold constant array references.  */
      if (!*inside_addr_expr_p)
	t = fold_read_from_constant_string (expr);
      else
	t = NULL;
      break;

    case ADDR_EXPR:
      *inside_addr_expr_p = true;
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      *inside_addr_expr_p = false;
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Set TREE_INVARIANT properly so that the value is properly
	 considered constant, and so gets propagated as expected.  */
      if (*changed_p)
	recompute_tree_invariant_for_addr_expr (expr);
      return NULL_TREE;

    case PLUS_EXPR:
    case MINUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (expr);
      break;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
	 We've already checked that the records are compatible, so we should
	 come up with a set of compatible fields.  */
      {
	tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
	tree expr_field = TREE_OPERAND (expr, 1);

	if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
	  {
	    expr_field = find_compatible_field (expr_record, expr_field);
	    TREE_OPERAND (expr, 1) = expr_field;
	  }
      }
      break;

    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;

    case COND_EXPR:
      if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
	{
	  tree op0 = TREE_OPERAND (expr, 0);
	  tree tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
				  TREE_OPERAND (op0, 0),
				  TREE_OPERAND (op0, 1));
	  if (tem && set_rhs (expr_p, tem))
	    {
	      t = *expr_p;
	      break;
	    }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
/* Return in LENGTH the string length, the maximum string length, or
   the maximum value of ARG, depending on TYPE: 0 for the string
   length, 1 for the maximum string length and 2 for the maximum value
   ARG can have.
   If ARG is an SSA name variable, follow its use-def chains.  Return
   false if we are unable to determine the length or value, or if
   *LENGTH was already set and, for TYPE == 0, disagrees with the
   length we determine.  VISITED is a bitmap of visited variables.  */
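/* For example, with TYPE == 0 an ARG that can only reach the constant
   string "abc" yields 3, while with TYPE == 2 an ARG defined by a PHI
   of the constants 4 and 12 yields 12.  */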
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, def_stmt, val;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (TREE_CODE (def_stmt))
    {
    case GIMPLE_MODIFY_STMT:
      {
	tree rhs;

	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
	STRIP_NOPS (rhs);
	return get_maxval_strlen (rhs, length, visited, type);
      }

    case PHI_NODE:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	int i;

	for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
	  {
	    tree arg = PHI_ARG_DEF (def_stmt, i);

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == PHI_RESULT (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }

	return true;
      }

    default:
      break;
    }

  return false;
}
/* Fold builtin call FN in statement STMT.  If it cannot be folded into a
   constant, return NULL_TREE.  Otherwise, return its constant value.  */
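/* For example, "x_1 = strlen (p_2)" where p_2 is known to point to the
   constant string "abc" is folded to the constant 3.  */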
static tree
ccp_fold_builtin (tree stmt, tree fn)
{
  tree result, val[3];
  tree callee, arglist, a;
  int arg_mask, i, type;
  bitmap visited;
  bool ignore;

  ignore = TREE_CODE (stmt) != GIMPLE_MODIFY_STMT;

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  callee = get_callee_fndecl (fn);
  arglist = TREE_OPERAND (fn, 1);
  result = fold_builtin (callee, arglist, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  if (!arglist)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_mask = 1;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_mask = 2;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_mask = 4;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_mask = 2;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_mask = 2;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }
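  /* ARG_MASK is a bitmask over the argument list: bit N set means the
     (N+1)-th argument should be analyzed with get_maxval_strlen, using
     the TYPE selected above.  */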
  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);

  memset (val, 0, sizeof (val));
  for (i = 0, a = arglist;
       arg_mask;
       i++, arg_mask >>= 1, a = TREE_CHAIN (a))
    if (arg_mask & 1)
      {
	bitmap_clear (visited);
	if (!get_maxval_strlen (TREE_VALUE (a), &val[i], visited, type))
	  val[i] = NULL_TREE;
      }

  BITMAP_FREE (visited);
  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0])
	{
	  tree new = fold_convert (TREE_TYPE (fn), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new)
	      || (is_gimple_cast (new)
		  && is_gimple_val (TREE_OPERAND (new, 0))))
	    return new;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]))
	result = fold_builtin_strcpy (callee, arglist, val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]))
	result = fold_builtin_strncpy (callee, arglist, val[1]);
      break;

    case BUILT_IN_FPUTS:
      result = fold_builtin_fputs (arglist,
				   TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 0,
				   val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      result = fold_builtin_fputs (arglist,
				   TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 1,
				   val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]))
	result = fold_builtin_memory_chk (callee, arglist, val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = fold_builtin_stxcpy_chk (callee, arglist, val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]))
	result = fold_builtin_strncpy_chk (arglist, val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = fold_builtin_snprintf_chk (arglist, val[1],
					    DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Fold the statement pointed to by STMT_P.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */
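/* For example, once &a has been propagated into the pointer p_1, the
   dereference in "x_2 = *p_1" folds away and the statement becomes
   "x_2 = a".  */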
bool
fold_stmt (tree *stmt_p)
{
  tree rhs, result, stmt;
  struct fold_stmt_r_data fold_stmt_r_data;
  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  stmt = *stmt_p;

  /* If we replaced constants and the statement makes pointer dereferences,
     then we may need to fold instances of *&VAR into VAR, etc.  */
  if (walk_tree (stmt_p, fold_stmt_r, &fold_stmt_r_data, NULL))
    {
      *stmt_p
	= build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
				    NULL);
      return true;
    }

  rhs = get_rhs (stmt);
  if (!rhs)
    return changed;
  result = NULL_TREE;

  if (TREE_CODE (rhs) == CALL_EXPR)
    {
      tree callee;

      /* Check for builtins that CCP can handle using information not
	 available in the generic fold routines.  */
      callee = get_callee_fndecl (rhs);
      if (callee && DECL_BUILT_IN (callee))
	result = ccp_fold_builtin (stmt, rhs);
      else
	{
	  /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
	     here are when we've propagated the address of a decl into the
	     object slot.  */
	  /* ??? Should perhaps do this in fold proper.  However, doing it
	     there requires that we create a new CALL_EXPR, and that requires
	     copying EH region info to the new node.  Easier to just do it
	     here where we can just smash the call operand.  Also
	     CALL_EXPR_RETURN_SLOT_OPT needs to be handled correctly and
	     copied, and fold_ternary does not have that information.  */
	  callee = TREE_OPERAND (rhs, 0);
	  if (TREE_CODE (callee) == OBJ_TYPE_REF
	      && lang_hooks.fold_obj_type_ref
	      && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
	      && DECL_P (TREE_OPERAND
			 (OBJ_TYPE_REF_OBJECT (callee), 0)))
	    {
	      tree t;

	      /* ??? Caution: Broken ADDR_EXPR semantics means that
		 looking at the type of the operand of the addr_expr
		 can yield an array type.  See silly exception in
		 check_pointer_types_r.  */

	      t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
	      t = lang_hooks.fold_obj_type_ref (callee, t);
	      if (t)
		{
		  TREE_OPERAND (rhs, 0) = t;
		  changed = true;
		}
	    }
	}
    }

  /* If we couldn't fold the RHS, hand over to the generic fold routines.  */
  if (result == NULL_TREE)
    result = fold (rhs);

  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
     may have been added by fold, and "useless" type conversions that might
     now be apparent due to propagation.  */
  STRIP_USELESS_TYPE_CONVERSION (result);

  if (result != rhs)
    changed |= set_rhs (stmt_p, result);

  return changed;
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  */
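/* Unlike fold_stmt, the root statement node must survive: the caller
   keeps using STMT, so the asserts below check that neither the tree
   walk nor the RHS replacement substituted a new statement.  */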
bool
fold_stmt_inplace (tree stmt)
{
  tree old_stmt = stmt, rhs, new_rhs;
  struct fold_stmt_r_data fold_stmt_r_data;
  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  walk_tree (&stmt, fold_stmt_r, &fold_stmt_r_data, NULL);
  gcc_assert (stmt == old_stmt);

  rhs = get_rhs (stmt);
  if (!rhs || rhs == stmt)
    return changed;

  new_rhs = fold (rhs);
  STRIP_USELESS_TYPE_CONVERSION (new_rhs);
  if (new_rhs == rhs)
    return changed;

  changed |= set_rhs (&stmt, new_rhs);
  gcc_assert (stmt == old_stmt);

  return changed;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.
   When IGNORE is set, don't worry about the return value.  */
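/* This is used when a builtin has been folded into a form that is not
   itself a valid GIMPLE RHS, e.g. when a call to fputs is turned into
   a call to fwrite; the replacement must be re-gimplified first.  */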
static tree
convert_to_gimple_builtin (block_stmt_iterator *si_p, tree expr, bool ignore)
{
  tree_stmt_iterator ti;
  tree stmt = bsi_stmt (*si_p);
  tree tmp, stmts = NULL;

  push_gimplify_context ();
  if (ignore)
    {
      tmp = build_empty_stmt ();
      gimplify_and_add (expr, &stmts);
    }
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);
  pop_gimplify_context (NULL);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&stmts, EXPR_LOCATION (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (ti = tsi_start (stmts); !tsi_end_p (ti); tsi_next (&ti))
    {
      tree new_stmt = tsi_stmt (ti);
      find_new_referenced_vars (tsi_stmt_ptr (ti));
      bsi_insert_before (si_p, new_stmt, BSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      bsi_next (si_p);
    }

  return tmp;
}
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */
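/* For example, a __builtin_strlen whose argument was discovered to be
   a constant string becomes a plain constant assignment here, and any
   __builtin_constant_p still unresolved at this point folds to 0.  */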
static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      for (i = bsi_start (bb); !bsi_end_p (i); )
	{
	  tree *stmtp = bsi_stmt_ptr (i);
	  tree old_stmt = *stmtp;
	  tree call = get_rhs (*stmtp);
	  tree callee, result;
	  enum built_in_function fcode;

	  if (!call || TREE_CODE (call) != CALL_EXPR)
	    {
	      bsi_next (&i);
	      continue;
	    }
	  callee = get_callee_fndecl (call);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      bsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = ccp_fold_builtin (*stmtp, call);
	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      default:
		bsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_generic_stmt (dump_file, *stmtp, dump_flags);
	    }

	  push_stmt_changes (stmtp);

	  if (!set_rhs (stmtp, result))
	    {
	      result = convert_to_gimple_builtin (&i, result,
						  TREE_CODE (old_stmt)
						  != GIMPLE_MODIFY_STMT);
	      if (result)
		{
		  bool ok = set_rhs (stmtp, result);
		  gcc_assert (ok);
		}
	    }

	  pop_stmt_changes (stmtp);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, *stmtp)
	      && tree_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_generic_stmt (dump_file, *stmtp, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin; there might be new opportunities now.  */
	  call = get_rhs (*stmtp);
	  if (!call || TREE_CODE (call) != CALL_EXPR)
	    {
	      bsi_next (&i);
	      continue;
	    }
	  callee = get_callee_fndecl (call);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    bsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    cleanup_tree_cfg ();
  return 0;
}
struct tree_opt_pass pass_fold_builtins =
{
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa,			/* todo_flags_finish */
  0					/* letter */
};