gcc/tree-ssa-ccp.c
1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
12 later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Conditional constant propagation (CCP) is based on the SSA
24 propagation engine (tree-ssa-propagate.c). Constant assignments of
25 the form VAR = CST are propagated from the assignments into uses of
26 VAR, which in turn may generate new constants. The simulation uses
27 a four level lattice to keep track of constant values associated
28 with SSA names. Given an SSA name V_i, it may take one of the
29 following values:
31 UNINITIALIZED -> the initial state of the value. This value
32 is replaced with a correct initial value
33 the first time the value is used, so the
34 rest of the pass does not need to care about
35 it. Using this value simplifies initialization
36 of the pass, and prevents us from needlessly
37 scanning statements that are never reached.
39 UNDEFINED -> V_i is a local variable whose definition
40 has not been processed yet. Therefore we
41 don't yet know if its value is a constant
42 or not.
44 CONSTANT -> V_i has been found to hold a constant
45 value C.
47 VARYING -> V_i cannot take a constant value, or if it
48 does, it is not possible to determine it
49 at compile time.
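   For instance (a purely illustrative fragment; the SSA names are
   hypothetical and not taken from any particular test case):

	x_1 = 4;		-> x_1 becomes CONSTANT 4
	y_2 = x_1 + 1;		-> y_2 becomes CONSTANT 5
	z_3 = foo ();		-> z_3 becomes VARYING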
51 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
53 1- In ccp_visit_stmt, we are interested in assignments whose RHS
54 evaluates into a constant and conditional jumps whose predicate
55 evaluates into a boolean true or false. When an assignment of
56 the form V_i = CONST is found, V_i's lattice value is set to
57 CONSTANT and CONST is associated with it. This causes the
58 propagation engine to add all the SSA edges coming out of the
59 assignment into the worklists, so that statements that use V_i
60 can be visited.
62 If the statement is a conditional with a constant predicate, we
63 mark the outgoing edges as executable or not executable
64 depending on the predicate's value. This is then used when
65 visiting PHI nodes to know when a PHI argument can be ignored.
68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69 same constant C, then the LHS of the PHI is set to C. This
70 evaluation is known as the "meet operation". Since one of the
71 goals of this evaluation is to optimistically return constant
72 values as often as possible, it uses two main short cuts:
74 - If an argument is flowing in through a non-executable edge, it
75 is ignored. This is useful in cases like this:
77 if (PRED)
78 a_9 = 3;
79 else
80 a_10 = 100;
81 a_11 = PHI (a_9, a_10)
83 If PRED is known to always evaluate to false, then we can
84 assume that a_11 will always take its value from a_10, meaning
85 that instead of considering it VARYING (a_9 and a_10 have
86 different values), we can consider it CONSTANT 100.
88 - If an argument has an UNDEFINED value, then it does not affect
89 the outcome of the meet operation. If a variable V_i has an
90 UNDEFINED value, it means that either its defining statement
91 hasn't been visited yet or V_i has no defining statement, in
92 which case the original symbol 'V' is being used
93 uninitialized. Since 'V' is a local variable, the compiler
94 may assume any initial value for it.
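   For instance (again a hypothetical fragment):

	if (PRED)
	  b_2 = 7;
	b_3 = PHI (b_2, b_1)

   If b_1 has no defining statement, it is UNDEFINED and does not
   affect the meet, so b_3 may optimistically be considered
   CONSTANT 7.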
97 After propagation, every variable V_i that ends up with a lattice
98 value of CONSTANT will have the associated constant value in the
99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
100 final substitution and folding.
103 Constant propagation in stores and loads (STORE-CCP)
104 ----------------------------------------------------
106 While CCP has all the logic to propagate constants in GIMPLE
107 registers, it is missing the ability to associate constants with
108 stores and loads (i.e., pointer dereferences, structures and
109 global/aliased variables). We don't keep loads and stores in
110 SSA, but we do build a factored use-def web for them (in the
111 virtual operands).
113 For instance, consider the following code fragment:
115 struct A a;
116 const int B = 42;
118 void foo (int i)
120 if (i > 10)
121 a.a = 42;
122 else
124 a.b = 21;
125 a.a = a.b + 21;
128 if (a.a != B)
129 never_executed ();
132 We should be able to deduce that the predicate 'a.a != B' is always
133 false. To achieve this, we associate constant values to the SSA
134 names in the VDEF operands for each store. Additionally,
135 since we also glob partial loads/stores with the base symbol, we
136 also keep track of the memory reference where the constant value
137 was stored (in the MEM_REF field of PROP_VALUE_T). For instance,
139 # a_5 = VDEF <a_4>
140 a.a = 2;
142 # VUSE <a_5>
143 x_3 = a.b;
145 In the example above, CCP will associate value '2' with 'a_5', but
146 it would be wrong to replace the load from 'a.b' with '2', because
147 '2' had been stored into a.a.
149 Note that the initial value of virtual operands is VARYING, not
150 UNDEFINED. Consider, for instance global variables:
152 int A;
154 foo (int i)
156 if (i_3 > 10)
157 A_4 = 3;
158 # A_5 = PHI (A_4, A_2);
160 # VUSE <A_5>
161 A.0_6 = A;
163 return A.0_6;
166 The value of A_2 cannot be assumed to be UNDEFINED, as it may have
167 been defined outside of foo. If we were to assume it UNDEFINED, we
168 would erroneously optimize the above into 'return 3;'.
170 Though STORE-CCP is not too expensive, it does have to do more work
171 than regular CCP, so it is only enabled at -O2. Both regular CCP
172 and STORE-CCP use the exact same algorithm. The only distinction
173 is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
174 set to true. This affects the evaluation of statements and PHI
175 nodes.
177 References:
179 Constant propagation with conditional branches,
180 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
182 Building an Optimizing Compiler,
183 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
185 Advanced Compiler Design and Implementation,
186 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
188 #include "config.h"
189 #include "system.h"
190 #include "coretypes.h"
191 #include "tm.h"
192 #include "tree.h"
193 #include "flags.h"
194 #include "rtl.h"
195 #include "tm_p.h"
196 #include "ggc.h"
197 #include "basic-block.h"
198 #include "output.h"
199 #include "expr.h"
200 #include "function.h"
201 #include "diagnostic.h"
202 #include "timevar.h"
203 #include "tree-dump.h"
204 #include "tree-flow.h"
205 #include "tree-pass.h"
206 #include "tree-ssa-propagate.h"
207 #include "value-prof.h"
208 #include "langhooks.h"
209 #include "target.h"
210 #include "toplev.h"
213 /* Possible lattice values. */
214 typedef enum
216 UNINITIALIZED,
217 UNDEFINED,
218 CONSTANT,
219 VARYING
220 } ccp_lattice_t;
222 /* Array of propagated constant values. After propagation,
223 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
224 the constant is held in an SSA name representing a memory store
225 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
226 memory reference used to store (i.e., the LHS of the assignment
227 doing the store). */
228 static prop_value_t *const_val;
230 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
232 static void
233 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
235 switch (val.lattice_val)
237 case UNINITIALIZED:
238 fprintf (outf, "%sUNINITIALIZED", prefix);
239 break;
240 case UNDEFINED:
241 fprintf (outf, "%sUNDEFINED", prefix);
242 break;
243 case VARYING:
244 fprintf (outf, "%sVARYING", prefix);
245 break;
246 case CONSTANT:
247 fprintf (outf, "%sCONSTANT ", prefix);
248 print_generic_expr (outf, val.value, dump_flags);
249 break;
250 default:
251 gcc_unreachable ();
256 /* Print lattice value VAL to stderr. */
258 void debug_lattice_value (prop_value_t val);
260 void
261 debug_lattice_value (prop_value_t val)
263 dump_lattice_value (stderr, "", val);
264 fprintf (stderr, "\n");
269 /* If SYM is a constant variable with known value, return the value.
270 NULL_TREE is returned otherwise. */
272 tree
273 get_symbol_constant_value (tree sym)
275 if (TREE_STATIC (sym)
276 && TREE_READONLY (sym)
277 && !MTAG_P (sym))
279 tree val = DECL_INITIAL (sym);
280 if (val)
282 STRIP_USELESS_TYPE_CONVERSION (val);
283 if (is_gimple_min_invariant (val))
284 return val;
286 /* Variables declared 'const' without an initializer
287 have zero as the initializer if they may not be
288 overridden at link or run time. */
289 if (!val
290 && targetm.binds_local_p (sym)
291 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
292 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
293 return fold_convert (TREE_TYPE (sym), integer_zero_node);
296 return NULL_TREE;
299 /* Compute a default value for variable VAR and store it in the
300 CONST_VAL array. The following rules are used to get default
301 values:
303 1- Global and static variables that are declared constant are
304 considered CONSTANT.
306 2- Any other value is considered UNDEFINED. This is useful when
307 considering PHI nodes. PHI arguments that are undefined do not
308 change the constant value of the PHI node, which allows for more
309 constants to be propagated.
311 3- Variables defined by statements other than assignments and PHI
312 nodes are considered VARYING.
314 4- Initial values of variables that are not GIMPLE registers are
315 considered VARYING. */
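/* For example (purely illustrative): under the rules above, a use of an
   uninitialized local such as i_2 defaults to UNDEFINED, the default
   definition of a PARM_DECL or of any non-register defaults to VARYING,
   and a static 'const' variable with a known initializer defaults to
   CONSTANT.  */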
317 static prop_value_t
318 get_default_value (tree var)
320 tree sym = SSA_NAME_VAR (var);
321 prop_value_t val = { UNINITIALIZED, NULL_TREE };
322 tree cst_val;
324 if (!is_gimple_reg (var))
326 /* Short circuit for regular CCP. We are not interested in any
327 non-register when DO_STORE_CCP is false. */
328 val.lattice_val = VARYING;
330 else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
332 /* Globals and static variables declared 'const' take their
333 initial value. */
334 val.lattice_val = CONSTANT;
335 val.value = cst_val;
337 else
339 gimple stmt = SSA_NAME_DEF_STMT (var);
341 if (gimple_nop_p (stmt))
343 /* Variables defined by an empty statement are those used
344 before being initialized. If VAR is a local variable, we
345 can assume initially that it is UNDEFINED, otherwise we must
346 consider it VARYING. */
347 if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
348 val.lattice_val = UNDEFINED;
349 else
350 val.lattice_val = VARYING;
352 else if (is_gimple_assign (stmt)
353 /* Value-returning GIMPLE_CALL statements assign to
354 a variable, and are treated similarly to GIMPLE_ASSIGN. */
355 || (is_gimple_call (stmt)
356 && gimple_call_lhs (stmt) != NULL_TREE)
357 || gimple_code (stmt) == GIMPLE_PHI)
359 /* Any other variable defined by an assignment or a PHI node
360 is considered UNDEFINED. */
361 val.lattice_val = UNDEFINED;
363 else
365 /* Otherwise, VAR will never take on a constant value. */
366 val.lattice_val = VARYING;
370 return val;
374 /* Get the constant value associated with variable VAR. */
376 static inline prop_value_t *
377 get_value (tree var)
379 prop_value_t *val;
381 if (const_val == NULL)
382 return NULL;
384 val = &const_val[SSA_NAME_VERSION (var)];
385 if (val->lattice_val == UNINITIALIZED)
386 *val = get_default_value (var);
388 return val;
391 /* Sets the value associated with VAR to VARYING. */
393 static inline void
394 set_value_varying (tree var)
396 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
398 val->lattice_val = VARYING;
399 val->value = NULL_TREE;
402 /* For float types, modify the value of VAL to make ccp work correctly
403 for non-standard values (-0, NaN):
405 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
406 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
407 This is to fix the following problem (see PR 29921): Suppose we have
409 x = 0.0 * y
411 and we set value of y to NaN. This causes value of x to be set to NaN.
412 When we later determine that y is in fact VARYING, fold uses the fact
413 that HONOR_NANS is false, and we try to change the value of x to 0,
414 causing an ICE. With HONOR_NANS being false, the real appearance of
415 NaN would cause undefined behavior, though, so claiming that y (and x)
416 are UNDEFINED initially is correct. */
418 static void
419 canonicalize_float_value (prop_value_t *val)
421 enum machine_mode mode;
422 tree type;
423 REAL_VALUE_TYPE d;
425 if (val->lattice_val != CONSTANT
426 || TREE_CODE (val->value) != REAL_CST)
427 return;
429 d = TREE_REAL_CST (val->value);
430 type = TREE_TYPE (val->value);
431 mode = TYPE_MODE (type);
433 if (!HONOR_SIGNED_ZEROS (mode)
434 && REAL_VALUE_MINUS_ZERO (d))
436 val->value = build_real (type, dconst0);
437 return;
440 if (!HONOR_NANS (mode)
441 && REAL_VALUE_ISNAN (d))
443 val->lattice_val = UNDEFINED;
444 val->value = NULL;
445 return;
449 /* Set the value for variable VAR to NEW_VAL. Return true if the new
450 value is different from VAR's previous value. */
452 static bool
453 set_lattice_value (tree var, prop_value_t new_val)
455 prop_value_t *old_val = get_value (var);
457 canonicalize_float_value (&new_val);
459 /* Lattice transitions must always be monotonically increasing in
460 value. If *OLD_VAL and NEW_VAL are the same, return false to
461 inform the caller that this was a non-transition. */
463 gcc_assert (old_val->lattice_val < new_val.lattice_val
464 || (old_val->lattice_val == new_val.lattice_val
465 && ((!old_val->value && !new_val.value)
466 || operand_equal_p (old_val->value, new_val.value, 0))));
468 if (old_val->lattice_val != new_val.lattice_val)
470 if (dump_file && (dump_flags & TDF_DETAILS))
472 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
473 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
476 *old_val = new_val;
478 gcc_assert (new_val.lattice_val != UNDEFINED);
479 return true;
482 return false;
486 /* Return the likely CCP lattice value for STMT.
488 If STMT has no operands, then return CONSTANT.
490 Else if undefinedness of operands of STMT cause its value to be
491 undefined, then return UNDEFINED.
493 Else if any operands of STMT are constants, then return CONSTANT.
495 Else return VARYING. */
497 static ccp_lattice_t
498 likely_value (gimple stmt)
500 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
501 tree use;
502 ssa_op_iter iter;
504 enum gimple_code code = gimple_code (stmt);
506 /* This function appears to be called only for assignments, calls,
507 conditionals, and switches, due to the logic in visit_stmt. */
508 gcc_assert (code == GIMPLE_ASSIGN
509 || code == GIMPLE_CALL
510 || code == GIMPLE_COND
511 || code == GIMPLE_SWITCH);
513 /* If the statement has volatile operands, it won't fold to a
514 constant value. */
515 if (gimple_has_volatile_ops (stmt))
516 return VARYING;
518 /* If we are not doing store-ccp, statements with loads
519 and/or stores will never fold into a constant. */
520 if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
521 return VARYING;
523 /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
524 is_gimple_min_invariant, so we do not consider calls or
525 other forms of assignment. */
526 if (gimple_assign_single_p (stmt)
527 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
528 return CONSTANT;
530 if (code == GIMPLE_COND
531 && is_gimple_min_invariant (gimple_cond_lhs (stmt))
532 && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
533 return CONSTANT;
535 if (code == GIMPLE_SWITCH
536 && is_gimple_min_invariant (gimple_switch_index (stmt)))
537 return CONSTANT;
539 /* Arrive here for more complex cases. */
541 has_constant_operand = false;
542 has_undefined_operand = false;
543 all_undefined_operands = true;
544 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
546 prop_value_t *val = get_value (use);
548 if (val->lattice_val == UNDEFINED)
549 has_undefined_operand = true;
550 else
551 all_undefined_operands = false;
553 if (val->lattice_val == CONSTANT)
554 has_constant_operand = true;
557 /* If the operation combines operands like COMPLEX_EXPR make sure to
558 not mark the result UNDEFINED if only one part of the result is
559 undefined. */
560 if (has_undefined_operand && all_undefined_operands)
561 return UNDEFINED;
562 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
564 switch (gimple_assign_rhs_code (stmt))
566 /* Unary operators are handled with all_undefined_operands. */
567 case PLUS_EXPR:
568 case MINUS_EXPR:
569 case POINTER_PLUS_EXPR:
570 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
571 Not bitwise operators, one VARYING operand may specify the
572 result completely. Not logical operators for the same reason.
573 Not COMPLEX_EXPR as one VARYING operand makes the result partly
574 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
575 the undefined operand may be promoted. */
576 return UNDEFINED;
578 default:
582 /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
583 fall back to VARYING even if there were CONSTANT operands. */
584 if (has_undefined_operand)
585 return VARYING;
587 if (has_constant_operand
588 /* We do not consider virtual operands here -- load from read-only
589 memory may have only VARYING virtual operands, but still be
590 constant. */
591 || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
592 return CONSTANT;
594 return VARYING;
597 /* Returns true if STMT cannot be constant. */
599 static bool
600 surely_varying_stmt_p (gimple stmt)
602 /* If the statement has operands that we cannot handle, it cannot be
603 constant. */
604 if (gimple_has_volatile_ops (stmt))
605 return true;
607 if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
608 return true;
610 /* If it is a call that does not return a value, or a direct call
611 to a function that is not a builtin, it is varying. */
612 if (is_gimple_call (stmt))
614 tree fndecl;
615 if (!gimple_call_lhs (stmt)
616 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
617 && !DECL_BUILT_IN (fndecl)))
618 return true;
621 /* Anything other than assignments and conditional jumps is not
622 interesting for CCP. */
623 if (gimple_code (stmt) != GIMPLE_ASSIGN
624 && gimple_code (stmt) != GIMPLE_COND
625 && gimple_code (stmt) != GIMPLE_SWITCH
626 && gimple_code (stmt) != GIMPLE_CALL)
627 return true;
629 return false;
632 /* Initialize local data structures for CCP. */
634 static void
635 ccp_initialize (void)
637 basic_block bb;
639 const_val = XCNEWVEC (prop_value_t, num_ssa_names);
641 /* Initialize simulation flags for PHI nodes and statements. */
642 FOR_EACH_BB (bb)
644 gimple_stmt_iterator i;
646 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
648 gimple stmt = gsi_stmt (i);
649 bool is_varying = surely_varying_stmt_p (stmt);
651 if (is_varying)
653 tree def;
654 ssa_op_iter iter;
656 /* If the statement will not produce a constant, mark
657 all its outputs VARYING. */
658 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
660 if (is_varying)
661 set_value_varying (def);
664 prop_set_simulate_again (stmt, !is_varying);
668 /* Now process PHI nodes. We never clear the simulate_again flag on
669 phi nodes, since we do not know which edges are executable yet,
670 except for phi nodes for virtual operands when we do not do store ccp. */
671 FOR_EACH_BB (bb)
673 gimple_stmt_iterator i;
675 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
677 gimple phi = gsi_stmt (i);
679 if (!is_gimple_reg (gimple_phi_result (phi)))
680 prop_set_simulate_again (phi, false);
681 else
682 prop_set_simulate_again (phi, true);
688 /* Do final substitution of propagated values, cleanup the flowgraph and
689 free allocated storage.
691 Return TRUE when something was optimized. */
693 static bool
694 ccp_finalize (void)
696 /* Perform substitutions based on the known constant values. */
697 bool something_changed = substitute_and_fold (const_val, false);
699 free (const_val);
700 const_val = NULL;
701 return something_changed;
705 /* Compute the meet operator between *VAL1 and *VAL2. Store the result
706 in VAL1.
708 any M UNDEFINED = any
709 any M VARYING = VARYING
710 Ci M Cj = Ci if (i == j)
711 Ci M Cj = VARYING if (i != j)
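   A concrete reading of these rules (constants chosen arbitrarily
   for illustration):

	CONSTANT 3  M UNDEFINED   = CONSTANT 3
	CONSTANT 3  M CONSTANT 3  = CONSTANT 3
	CONSTANT 3  M CONSTANT 4  = VARYING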
714 static void
715 ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
717 if (val1->lattice_val == UNDEFINED)
719 /* UNDEFINED M any = any */
720 *val1 = *val2;
722 else if (val2->lattice_val == UNDEFINED)
724 /* any M UNDEFINED = any
725 Nothing to do. VAL1 already contains the value we want. */
728 else if (val1->lattice_val == VARYING
729 || val2->lattice_val == VARYING)
731 /* any M VARYING = VARYING. */
732 val1->lattice_val = VARYING;
733 val1->value = NULL_TREE;
735 else if (val1->lattice_val == CONSTANT
736 && val2->lattice_val == CONSTANT
737 && simple_cst_equal (val1->value, val2->value) == 1)
739 /* Ci M Cj = Ci if (i == j)
740 Ci M Cj = VARYING if (i != j)
742 If these two values come from memory stores, make sure that
743 they come from the same memory reference. */
744 val1->lattice_val = CONSTANT;
745 val1->value = val1->value;
747 else
749 /* Any other combination is VARYING. */
750 val1->lattice_val = VARYING;
751 val1->value = NULL_TREE;
756 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
757 lattice values to determine PHI_NODE's lattice value. The value of a
758 PHI node is determined by calling ccp_lattice_meet with all the arguments
759 of the PHI node that are incoming via executable edges. */
761 static enum ssa_prop_result
762 ccp_visit_phi_node (gimple phi)
764 unsigned i;
765 prop_value_t *old_val, new_val;
767 if (dump_file && (dump_flags & TDF_DETAILS))
769 fprintf (dump_file, "\nVisiting PHI node: ");
770 print_gimple_stmt (dump_file, phi, 0, dump_flags);
773 old_val = get_value (gimple_phi_result (phi));
774 switch (old_val->lattice_val)
776 case VARYING:
777 return SSA_PROP_VARYING;
779 case CONSTANT:
780 new_val = *old_val;
781 break;
783 case UNDEFINED:
784 new_val.lattice_val = UNDEFINED;
785 new_val.value = NULL_TREE;
786 break;
788 default:
789 gcc_unreachable ();
792 for (i = 0; i < gimple_phi_num_args (phi); i++)
794 /* Compute the meet operator over all the PHI arguments flowing
795 through executable edges. */
796 edge e = gimple_phi_arg_edge (phi, i);
798 if (dump_file && (dump_flags & TDF_DETAILS))
800 fprintf (dump_file,
801 "\n Argument #%d (%d -> %d %sexecutable)\n",
802 i, e->src->index, e->dest->index,
803 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
806 /* If the incoming edge is executable, compute the meet operator for
807 the existing value of the PHI node and the current PHI argument. */
808 if (e->flags & EDGE_EXECUTABLE)
810 tree arg = gimple_phi_arg (phi, i)->def;
811 prop_value_t arg_val;
813 if (is_gimple_min_invariant (arg))
815 arg_val.lattice_val = CONSTANT;
816 arg_val.value = arg;
818 else
819 arg_val = *(get_value (arg));
821 ccp_lattice_meet (&new_val, &arg_val);
823 if (dump_file && (dump_flags & TDF_DETAILS))
825 fprintf (dump_file, "\t");
826 print_generic_expr (dump_file, arg, dump_flags);
827 dump_lattice_value (dump_file, "\tValue: ", arg_val);
828 fprintf (dump_file, "\n");
831 if (new_val.lattice_val == VARYING)
832 break;
836 if (dump_file && (dump_flags & TDF_DETAILS))
838 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
839 fprintf (dump_file, "\n\n");
842 /* Make the transition to the new value. */
843 if (set_lattice_value (gimple_phi_result (phi), new_val))
845 if (new_val.lattice_val == VARYING)
846 return SSA_PROP_VARYING;
847 else
848 return SSA_PROP_INTERESTING;
850 else
851 return SSA_PROP_NOT_INTERESTING;
854 /* Return true if we may propagate the address expression ADDR into the
855 dereference DEREF and cancel them. */
857 bool
858 may_propagate_address_into_dereference (tree addr, tree deref)
860 gcc_assert (INDIRECT_REF_P (deref)
861 && TREE_CODE (addr) == ADDR_EXPR);
863 /* Don't propagate if ADDR's operand has incomplete type. */
864 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
865 return false;
867 /* If the address is invariant then we do not need to preserve restrict
868 qualifications. But we do need to preserve volatile qualifiers until
869 we can annotate the folded dereference itself properly. */
870 if (is_gimple_min_invariant (addr)
871 && (!TREE_THIS_VOLATILE (deref)
872 || TYPE_VOLATILE (TREE_TYPE (addr))))
873 return useless_type_conversion_p (TREE_TYPE (deref),
874 TREE_TYPE (TREE_OPERAND (addr, 0)));
876 /* Else both the address substitution and the folding must result in
877 a valid useless type conversion sequence. */
878 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
879 TREE_TYPE (addr))
880 && useless_type_conversion_p (TREE_TYPE (deref),
881 TREE_TYPE (TREE_OPERAND (addr, 0))));
884 /* CCP specific front-end to the non-destructive constant folding
885 routines.
887 Attempt to simplify the RHS of STMT knowing that one or more
888 operands are constants.
890 If simplification is possible, return the simplified RHS,
891 otherwise return the original RHS or NULL_TREE. */
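/* As an illustrative sketch (hypothetical SSA names): if STMT is the
   assignment x_2 = y_3 + 4 and the lattice records y_3 as CONSTANT 5,
   ccp_fold would return the constant 9.  */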
893 static tree
894 ccp_fold (gimple stmt)
896 switch (gimple_code (stmt))
898 case GIMPLE_ASSIGN:
900 enum tree_code subcode = gimple_assign_rhs_code (stmt);
902 switch (get_gimple_rhs_class (subcode))
904 case GIMPLE_SINGLE_RHS:
906 tree rhs = gimple_assign_rhs1 (stmt);
907 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
909 if (TREE_CODE (rhs) == SSA_NAME)
911 /* If the RHS is an SSA_NAME, return its known constant value,
912 if any. */
913 return get_value (rhs)->value;
915 /* Handle propagating invariant addresses into address operations.
916 The folding we do here matches that in tree-ssa-forwprop.c. */
917 else if (TREE_CODE (rhs) == ADDR_EXPR)
919 tree *base;
920 base = &TREE_OPERAND (rhs, 0);
921 while (handled_component_p (*base))
922 base = &TREE_OPERAND (*base, 0);
923 if (TREE_CODE (*base) == INDIRECT_REF
924 && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
926 prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
927 if (val->lattice_val == CONSTANT
928 && TREE_CODE (val->value) == ADDR_EXPR
929 && may_propagate_address_into_dereference
930 (val->value, *base))
932 /* We need to return a new tree, not modify the IL
933 or share parts of it. So play some tricks to
934 avoid manually building it. */
935 tree ret, save = *base;
936 *base = TREE_OPERAND (val->value, 0);
937 ret = unshare_expr (rhs);
938 recompute_tree_invariant_for_addr_expr (ret);
939 *base = save;
940 return ret;
945 if (kind == tcc_reference)
947 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR
948 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
950 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
951 if (val->lattice_val == CONSTANT)
952 return fold_unary (VIEW_CONVERT_EXPR,
953 TREE_TYPE (rhs), val->value);
955 return fold_const_aggregate_ref (rhs);
957 else if (kind == tcc_declaration)
958 return get_symbol_constant_value (rhs);
959 return rhs;
962 case GIMPLE_UNARY_RHS:
964 /* Handle unary operators that can appear in GIMPLE form.
965 Note that we know the single operand must be a constant,
966 so this should almost always return a simplified RHS. */
967 tree lhs = gimple_assign_lhs (stmt);
968 tree op0 = gimple_assign_rhs1 (stmt);
969 tree res;
971 /* Simplify the operand down to a constant. */
972 if (TREE_CODE (op0) == SSA_NAME)
974 prop_value_t *val = get_value (op0);
975 if (val->lattice_val == CONSTANT)
976 op0 = get_value (op0)->value;
979 /* Conversions are useless for CCP purposes if they are
980 value-preserving. Thus the restrictions that
981 useless_type_conversion_p places for pointer type conversions
982 do not apply here. Substitution later will only substitute to
983 allowed places. */
984 if (CONVERT_EXPR_CODE_P (subcode)
985 && POINTER_TYPE_P (TREE_TYPE (lhs))
986 && POINTER_TYPE_P (TREE_TYPE (op0))
987 /* Do not allow differences in volatile qualification
988 as this might get us confused as to whether a
989 propagation destination statement is volatile
990 or not. See PR36988. */
991 && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
992 == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
994 tree tem;
995 /* Still try to generate a constant of correct type. */
996 if (!useless_type_conversion_p (TREE_TYPE (lhs),
997 TREE_TYPE (op0))
998 && ((tem = maybe_fold_offset_to_address
999 (op0, integer_zero_node, TREE_TYPE (lhs)))
1000 != NULL_TREE))
1001 return tem;
1002 return op0;
1005 res = fold_unary (subcode, gimple_expr_type (stmt), op0);
1007 /* If the operation was a conversion do _not_ mark a
1008 resulting constant with TREE_OVERFLOW if the original
1009 constant was not. These conversions have implementation
1010 defined behavior and retaining the TREE_OVERFLOW flag
1011 here would confuse later passes such as VRP. */
1012 if (res
1013 && TREE_CODE (res) == INTEGER_CST
1014 && TREE_CODE (op0) == INTEGER_CST
1015 && CONVERT_EXPR_CODE_P (subcode))
1016 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
1018 return res;
1021 case GIMPLE_BINARY_RHS:
1023 /* Handle binary operators that can appear in GIMPLE form. */
1024 tree op0 = gimple_assign_rhs1 (stmt);
1025 tree op1 = gimple_assign_rhs2 (stmt);
1027 /* Simplify the operands down to constants when appropriate. */
1028 if (TREE_CODE (op0) == SSA_NAME)
1030 prop_value_t *val = get_value (op0);
1031 if (val->lattice_val == CONSTANT)
1032 op0 = val->value;
1035 if (TREE_CODE (op1) == SSA_NAME)
1037 prop_value_t *val = get_value (op1);
1038 if (val->lattice_val == CONSTANT)
1039 op1 = val->value;
1042 /* Fold &foo + CST into an invariant reference if possible. */
1043 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1044 && TREE_CODE (op0) == ADDR_EXPR
1045 && TREE_CODE (op1) == INTEGER_CST)
1047 tree lhs = gimple_assign_lhs (stmt);
1048 tree tem = maybe_fold_offset_to_address (op0, op1,
1049 TREE_TYPE (lhs));
1050 if (tem != NULL_TREE)
1051 return tem;
1054 return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
1057 default:
1058 gcc_unreachable ();
1061 break;
1063 case GIMPLE_CALL:
1065 tree fn = gimple_call_fn (stmt);
1066 prop_value_t *val;
1068 if (TREE_CODE (fn) == SSA_NAME)
1070 val = get_value (fn);
1071 if (val->lattice_val == CONSTANT)
1072 fn = val->value;
1074 if (TREE_CODE (fn) == ADDR_EXPR
1075 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1076 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
1078 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
1079 tree call, retval;
1080 unsigned i;
1081 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1083 args[i] = gimple_call_arg (stmt, i);
1084 if (TREE_CODE (args[i]) == SSA_NAME)
1086 val = get_value (args[i]);
1087 if (val->lattice_val == CONSTANT)
1088 args[i] = val->value;
1091 call = build_call_array (gimple_call_return_type (stmt),
1092 fn, gimple_call_num_args (stmt), args);
1093 retval = fold_call_expr (call, false);
1094 if (retval)
1095 /* fold_call_expr wraps the result inside a NOP_EXPR. */
1096 STRIP_NOPS (retval);
1097 return retval;
1099 return NULL_TREE;
1102 case GIMPLE_COND:
1104 /* Handle comparison operators that can appear in GIMPLE form. */
1105 tree op0 = gimple_cond_lhs (stmt);
1106 tree op1 = gimple_cond_rhs (stmt);
1107 enum tree_code code = gimple_cond_code (stmt);
1109 /* Simplify the operands down to constants when appropriate. */
1110 if (TREE_CODE (op0) == SSA_NAME)
1112 prop_value_t *val = get_value (op0);
1113 if (val->lattice_val == CONSTANT)
1114 op0 = val->value;
1117 if (TREE_CODE (op1) == SSA_NAME)
1119 prop_value_t *val = get_value (op1);
1120 if (val->lattice_val == CONSTANT)
1121 op1 = val->value;
1124 return fold_binary (code, boolean_type_node, op0, op1);
1127 case GIMPLE_SWITCH:
1129 tree rhs = gimple_switch_index (stmt);
1131 if (TREE_CODE (rhs) == SSA_NAME)
1133 /* If the RHS is an SSA_NAME, return its known constant value,
1134 if any. */
1135 return get_value (rhs)->value;
1138 return rhs;
1141 default:
1142 gcc_unreachable ();
1147 /* Return the tree representing the element referenced by T if T is an
1148 ARRAY_REF or COMPONENT_REF into constant aggregates. Return
1149 NULL_TREE otherwise. */
1151 tree
1152 fold_const_aggregate_ref (tree t)
1154 prop_value_t *value;
1155 tree base, ctor, idx, field;
1156 unsigned HOST_WIDE_INT cnt;
1157 tree cfield, cval;
1159 switch (TREE_CODE (t))
1161 case ARRAY_REF:
1162 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1163 DECL_INITIAL. If BASE is a nested reference into another
1164 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1165 the inner reference. */
1166 base = TREE_OPERAND (t, 0);
1167 switch (TREE_CODE (base))
1169 case VAR_DECL:
1170 if (!TREE_READONLY (base)
1171 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
1172 || !targetm.binds_local_p (base))
1173 return NULL_TREE;
1175 ctor = DECL_INITIAL (base);
1176 break;
1178 case ARRAY_REF:
1179 case COMPONENT_REF:
1180 ctor = fold_const_aggregate_ref (base);
1181 break;
1183 case STRING_CST:
1184 case CONSTRUCTOR:
1185 ctor = base;
1186 break;
1188 default:
1189 return NULL_TREE;
1192 if (ctor == NULL_TREE
1193 || (TREE_CODE (ctor) != CONSTRUCTOR
1194 && TREE_CODE (ctor) != STRING_CST)
1195 || !TREE_STATIC (ctor))
1196 return NULL_TREE;
1198 /* Get the index. If we have an SSA_NAME, try to resolve it
1199 with the current lattice value for the SSA_NAME. */
1200 idx = TREE_OPERAND (t, 1);
1201 switch (TREE_CODE (idx))
1203 case SSA_NAME:
1204 if ((value = get_value (idx))
1205 && value->lattice_val == CONSTANT
1206 && TREE_CODE (value->value) == INTEGER_CST)
1207 idx = value->value;
1208 else
1209 return NULL_TREE;
1210 break;
1212 case INTEGER_CST:
1213 break;
1215 default:
1216 return NULL_TREE;
1219 /* Fold read from constant string. */
1220 if (TREE_CODE (ctor) == STRING_CST)
1222 if ((TYPE_MODE (TREE_TYPE (t))
1223 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1224 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1225 == MODE_INT)
1226 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
1227 && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
1228 return build_int_cst_type (TREE_TYPE (t),
1229 (TREE_STRING_POINTER (ctor)
1230 [TREE_INT_CST_LOW (idx)]));
1231 return NULL_TREE;
1234 /* Whoo-hoo! I'll fold ya baby. Yeah! */
1235 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1236 if (tree_int_cst_equal (cfield, idx))
1238 STRIP_USELESS_TYPE_CONVERSION (cval);
1239 return cval;
1241 break;
1243 case COMPONENT_REF:
1244 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1245 DECL_INITIAL. If BASE is a nested reference into another
1246 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1247 the inner reference. */
1248 base = TREE_OPERAND (t, 0);
1249 switch (TREE_CODE (base))
1251 case VAR_DECL:
1252 if (!TREE_READONLY (base)
1253 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1254 || !targetm.binds_local_p (base))
1255 return NULL_TREE;
1257 ctor = DECL_INITIAL (base);
1258 break;
1260 case ARRAY_REF:
1261 case COMPONENT_REF:
1262 ctor = fold_const_aggregate_ref (base);
1263 break;
1265 default:
1266 return NULL_TREE;
1269 if (ctor == NULL_TREE
1270 || TREE_CODE (ctor) != CONSTRUCTOR
1271 || !TREE_STATIC (ctor))
1272 return NULL_TREE;
1274 field = TREE_OPERAND (t, 1);
1276 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1277 if (cfield == field
1278 /* FIXME: Handle bit-fields. */
1279 && ! DECL_BIT_FIELD (cfield))
1281 STRIP_USELESS_TYPE_CONVERSION (cval);
1282 return cval;
1284 break;
1286 case REALPART_EXPR:
1287 case IMAGPART_EXPR:
1289 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
1290 if (c && TREE_CODE (c) == COMPLEX_CST)
1291 return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
1292 break;
1295 case INDIRECT_REF:
1297 tree base = TREE_OPERAND (t, 0);
1298 if (TREE_CODE (base) == SSA_NAME
1299 && (value = get_value (base))
1300 && value->lattice_val == CONSTANT
1301 && TREE_CODE (value->value) == ADDR_EXPR)
1302 return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
1303 break;
1306 default:
1307 break;
1310 return NULL_TREE;
1313 /* Evaluate statement STMT.
1314 Valid only for assignments, calls, conditionals, and switches. */
1316 static prop_value_t
1317 evaluate_stmt (gimple stmt)
1319 prop_value_t val;
1320 tree simplified = NULL_TREE;
1321 ccp_lattice_t likelyvalue = likely_value (stmt);
1322 bool is_constant;
1324 fold_defer_overflow_warnings ();
1326 /* If the statement is likely to have a CONSTANT result, then try
1327 to fold the statement to determine the constant value. */
1328 /* FIXME. This is the only place that we call ccp_fold.
1329 Since likely_value never returns CONSTANT for calls, we will
1330 not attempt to fold them, including builtins that may profit. */
1331 if (likelyvalue == CONSTANT)
1332 simplified = ccp_fold (stmt);
1333 /* If the statement is likely to have a VARYING result, then do not
1334 bother folding the statement. */
1335 else if (likelyvalue == VARYING)
1337 enum gimple_code code = gimple_code (stmt);
1338 if (code == GIMPLE_ASSIGN)
1340 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1342 /* Other cases cannot satisfy is_gimple_min_invariant
1343 without folding. */
1344 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1345 simplified = gimple_assign_rhs1 (stmt);
1347 else if (code == GIMPLE_SWITCH)
1348 simplified = gimple_switch_index (stmt);
1349 else
1350 /* These cannot satisfy is_gimple_min_invariant without folding. */
1351 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1354 is_constant = simplified && is_gimple_min_invariant (simplified);
1356 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1358 if (dump_file && (dump_flags & TDF_DETAILS))
1360 fprintf (dump_file, "which is likely ");
1361 switch (likelyvalue)
1363 case CONSTANT:
1364 fprintf (dump_file, "CONSTANT");
1365 break;
1366 case UNDEFINED:
1367 fprintf (dump_file, "UNDEFINED");
1368 break;
1369 case VARYING:
1370 fprintf (dump_file, "VARYING");
1371 break;
1372 default:;
1374 fprintf (dump_file, "\n");
1377 if (is_constant)
1379 /* The statement produced a constant value. */
1380 val.lattice_val = CONSTANT;
1381 val.value = simplified;
1383 else
1385 /* The statement produced a nonconstant value. If the statement
1386 had UNDEFINED operands, then the result of the statement
1387 should be UNDEFINED. Otherwise, the statement is VARYING. */
1388 if (likelyvalue == UNDEFINED)
1389 val.lattice_val = likelyvalue;
1390 else
1391 val.lattice_val = VARYING;
1393 val.value = NULL_TREE;
1396 return val;
1399 /* Visit the assignment statement STMT. Set the value of its LHS to the
1400 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1401 creates virtual definitions, set the value of each new name to that
1402 of the RHS (if we can derive a constant out of the RHS).
1403 Value-returning call statements also perform an assignment, and
1404 are handled here. */
1406 static enum ssa_prop_result
1407 visit_assignment (gimple stmt, tree *output_p)
1409 prop_value_t val;
1410 enum ssa_prop_result retval;
1412 tree lhs = gimple_get_lhs (stmt);
1414 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1415 || gimple_call_lhs (stmt) != NULL_TREE);
1417 if (gimple_assign_copy_p (stmt))
1419 tree rhs = gimple_assign_rhs1 (stmt);
1421 if (TREE_CODE (rhs) == SSA_NAME)
1423 /* For a simple copy operation, we copy the lattice values. */
1424 prop_value_t *nval = get_value (rhs);
1425 val = *nval;
1427 else
1428 val = evaluate_stmt (stmt);
1430 else
1431 /* Evaluate the statement, which could be
1432 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1433 val = evaluate_stmt (stmt);
1435 retval = SSA_PROP_NOT_INTERESTING;
1437 /* Set the lattice value of the statement's output. */
1438 if (TREE_CODE (lhs) == SSA_NAME)
1440 /* If STMT is an assignment to an SSA_NAME, we only have one
1441 value to set. */
1442 if (set_lattice_value (lhs, val))
1444 *output_p = lhs;
1445 if (val.lattice_val == VARYING)
1446 retval = SSA_PROP_VARYING;
1447 else
1448 retval = SSA_PROP_INTERESTING;
1452 return retval;
1456 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1457 if it can determine which edge will be taken. Otherwise, return
1458 SSA_PROP_VARYING. */
1460 static enum ssa_prop_result
1461 visit_cond_stmt (gimple stmt, edge *taken_edge_p)
1463 prop_value_t val;
1464 basic_block block;
1466 block = gimple_bb (stmt);
1467 val = evaluate_stmt (stmt);
1469 /* Find which edge out of the conditional block will be taken and add it
1470 to the worklist. If no single edge can be determined statically,
1471 return SSA_PROP_VARYING to feed all the outgoing edges to the
1472 propagation engine. */
1473 *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
1474 if (*taken_edge_p)
1475 return SSA_PROP_INTERESTING;
1476 else
1477 return SSA_PROP_VARYING;
1481 /* Evaluate statement STMT. If the statement produces an output value and
1482 its evaluation changes the lattice value of its output, return
1483 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1484 output value.
1486 If STMT is a conditional branch and we can determine its truth
1487 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1488 value, return SSA_PROP_VARYING. */
1490 static enum ssa_prop_result
1491 ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
1493 tree def;
1494 ssa_op_iter iter;
1496 if (dump_file && (dump_flags & TDF_DETAILS))
1498 fprintf (dump_file, "\nVisiting statement:\n");
1499 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1502 switch (gimple_code (stmt))
1504 case GIMPLE_ASSIGN:
1505 /* If the statement is an assignment that produces a single
1506 output value, evaluate its RHS to see if the lattice value of
1507 its output has changed. */
1508 return visit_assignment (stmt, output_p);
1510 case GIMPLE_CALL:
1511 /* A value-returning call also performs an assignment. */
1512 if (gimple_call_lhs (stmt) != NULL_TREE)
1513 return visit_assignment (stmt, output_p);
1514 break;
1516 case GIMPLE_COND:
1517 case GIMPLE_SWITCH:
1518 /* If STMT is a conditional branch, see if we can determine
1519 which branch will be taken. */
1520 /* FIXME. It appears that we should be able to optimize
1521 computed GOTOs here as well. */
1522 return visit_cond_stmt (stmt, taken_edge_p);
1524 default:
1525 break;
1528 /* Any other kind of statement is not interesting for constant
1529 propagation and, therefore, not worth simulating. */
1530 if (dump_file && (dump_flags & TDF_DETAILS))
1531 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
1533 /* Definitions made by statements other than assignments to
1534 SSA_NAMEs represent unknown modifications to their outputs.
1535 Mark them VARYING. */
1536 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
1538 prop_value_t v = { VARYING, NULL_TREE };
1539 set_lattice_value (def, v);
1542 return SSA_PROP_VARYING;
1546 /* Main entry point for SSA Conditional Constant Propagation. */
1548 static unsigned int
1549 do_ssa_ccp (void)
1551 ccp_initialize ();
1552 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
1553 if (ccp_finalize ())
1554 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
1555 else
1556 return 0;
1560 static bool
1561 gate_ccp (void)
1563 return flag_tree_ccp != 0;
1567 struct gimple_opt_pass pass_ccp =
1570 GIMPLE_PASS,
1571 "ccp", /* name */
1572 gate_ccp, /* gate */
1573 do_ssa_ccp, /* execute */
1574 NULL, /* sub */
1575 NULL, /* next */
1576 0, /* static_pass_number */
1577 TV_TREE_CCP, /* tv_id */
1578 PROP_cfg | PROP_ssa, /* properties_required */
1579 0, /* properties_provided */
1580 0, /* properties_destroyed */
1581 0, /* todo_flags_start */
1582 TODO_dump_func | TODO_verify_ssa
1583 | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
1588 /* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
1589 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
1590 is the desired result type. */
1592 static tree
1593 maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
1594 bool allow_negative_idx)
1596 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
1597 tree array_type, elt_type, elt_size;
1598 tree domain_type;
1600 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
1601 measured in units of the size of the element type) from that ARRAY_REF.
1602 We can't do anything if either is variable.
1604 The case we handle here is *(&A[N]+O). */
1605 if (TREE_CODE (base) == ARRAY_REF)
1607 tree low_bound = array_ref_low_bound (base);
1609 elt_offset = TREE_OPERAND (base, 1);
1610 if (TREE_CODE (low_bound) != INTEGER_CST
1611 || TREE_CODE (elt_offset) != INTEGER_CST)
1612 return NULL_TREE;
1614 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
1615 base = TREE_OPERAND (base, 0);
1618 /* Ignore stupid user tricks of indexing non-array variables. */
1619 array_type = TREE_TYPE (base);
1620 if (TREE_CODE (array_type) != ARRAY_TYPE)
1621 return NULL_TREE;
1622 elt_type = TREE_TYPE (array_type);
1623 if (!useless_type_conversion_p (orig_type, elt_type))
1624 return NULL_TREE;
1626 /* Use signed size type for intermediate computation on the index. */
1627 idx_type = signed_type_for (size_type_node);
1629 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1630 element type (so we can use the alignment if it's not constant).
1631 Otherwise, compute the offset as an index by using a division. If the
1632 division isn't exact, then don't do anything. */
1633 elt_size = TYPE_SIZE_UNIT (elt_type);
1634 if (!elt_size)
1635 return NULL;
1636 if (integer_zerop (offset))
1638 if (TREE_CODE (elt_size) != INTEGER_CST)
1639 elt_size = size_int (TYPE_ALIGN (elt_type));
1641 idx = build_int_cst (idx_type, 0);
1643 else
1645 unsigned HOST_WIDE_INT lquo, lrem;
1646 HOST_WIDE_INT hquo, hrem;
1647 double_int soffset;
1649 /* The final array offset should be signed, so we need
1650 to sign-extend the (possibly pointer) offset here
1651 and use signed division. */
1652 soffset = double_int_sext (tree_to_double_int (offset),
1653 TYPE_PRECISION (TREE_TYPE (offset)));
1654 if (TREE_CODE (elt_size) != INTEGER_CST
1655 || div_and_round_double (TRUNC_DIV_EXPR, 0,
1656 soffset.low, soffset.high,
1657 TREE_INT_CST_LOW (elt_size),
1658 TREE_INT_CST_HIGH (elt_size),
1659 &lquo, &hquo, &lrem, &hrem)
1660 || lrem || hrem)
1661 return NULL_TREE;
1663 idx = build_int_cst_wide (idx_type, lquo, hquo);
1666 /* Assume the low bound is zero. If there is a domain type, get the
1667 low bound, if any, convert the index into that type, and add the
1668 low bound. */
1669 min_idx = build_int_cst (idx_type, 0);
1670 domain_type = TYPE_DOMAIN (array_type);
1671 if (domain_type)
1673 idx_type = domain_type;
1674 if (TYPE_MIN_VALUE (idx_type))
1675 min_idx = TYPE_MIN_VALUE (idx_type);
1676 else
1677 min_idx = fold_convert (idx_type, min_idx);
1679 if (TREE_CODE (min_idx) != INTEGER_CST)
1680 return NULL_TREE;
1682 elt_offset = fold_convert (idx_type, elt_offset);
1685 if (!integer_zerop (min_idx))
1686 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
1687 if (!integer_zerop (elt_offset))
1688 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
1690 /* Make sure to possibly truncate late after offsetting. */
1691 idx = fold_convert (idx_type, idx);
1693 /* We don't want to construct access past array bounds. For example
1694 char *(c[4]);
1695 c[3][2];
1696 should not be simplified into (*c)[14] or tree-vrp will
1697 give false warnings. The same is true for
1698 struct A { long x; char d[0]; } *a;
1699 (char *)a - 4;
1700 which should not be folded to &a->d[-8]. */
1701 if (domain_type
1702 && TYPE_MAX_VALUE (domain_type)
1703 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
1705 tree up_bound = TYPE_MAX_VALUE (domain_type);
1707 if (tree_int_cst_lt (up_bound, idx)
1708 /* Accesses after the end of arrays of size 0 (gcc
1709 extension) and 1 are likely intentional ("struct
1710 hack"). */
1711 && compare_tree_int (up_bound, 1) > 0)
1712 return NULL_TREE;
1714 if (domain_type
1715 && TYPE_MIN_VALUE (domain_type))
1717 if (!allow_negative_idx
1718 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
1719 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
1720 return NULL_TREE;
1722 else if (!allow_negative_idx
1723 && compare_tree_int (idx, 0) < 0)
1724 return NULL_TREE;
1726 return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
1730 /* Attempt to fold *(S+O) to S.X.
1731 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
1732 is the desired result type. */
1734 static tree
1735 maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
1736 tree orig_type, bool base_is_ptr)
1738 tree f, t, field_type, tail_array_field, field_offset;
1739 tree ret;
1740 tree new_base;
1742 if (TREE_CODE (record_type) != RECORD_TYPE
1743 && TREE_CODE (record_type) != UNION_TYPE
1744 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1745 return NULL_TREE;
1747 /* Short-circuit silly cases. */
1748 if (useless_type_conversion_p (record_type, orig_type))
1749 return NULL_TREE;
1751 tail_array_field = NULL_TREE;
1752 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1754 int cmp;
1756 if (TREE_CODE (f) != FIELD_DECL)
1757 continue;
1758 if (DECL_BIT_FIELD (f))
1759 continue;
1761 if (!DECL_FIELD_OFFSET (f))
1762 continue;
1763 field_offset = byte_position (f);
1764 if (TREE_CODE (field_offset) != INTEGER_CST)
1765 continue;
1767 /* ??? Java creates "interesting" fields for representing base classes.
1768 They have no name, and have no context. With no context, we get into
1769 trouble with nonoverlapping_component_refs_p. Skip them. */
1770 if (!DECL_FIELD_CONTEXT (f))
1771 continue;
1773 /* The previous array field isn't at the end. */
1774 tail_array_field = NULL_TREE;
1776 /* Check to see if this offset overlaps with the field. */
1777 cmp = tree_int_cst_compare (field_offset, offset);
1778 if (cmp > 0)
1779 continue;
1781 field_type = TREE_TYPE (f);
1783 /* Here we exactly match the offset being checked. If the types match,
1784 then we can return that field. */
1785 if (cmp == 0
1786 && useless_type_conversion_p (orig_type, field_type))
1788 if (base_is_ptr)
1789 base = build1 (INDIRECT_REF, record_type, base);
1790 t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1791 return t;
1794 /* Don't care about offsets into the middle of scalars. */
1795 if (!AGGREGATE_TYPE_P (field_type))
1796 continue;
1798 /* Check for array at the end of the struct. This is often
1799 used for flexible array members. We should be able to
1800 turn this into an array access anyway. */
1801 if (TREE_CODE (field_type) == ARRAY_TYPE)
1802 tail_array_field = f;
1804 /* Check the end of the field against the offset. */
1805 if (!DECL_SIZE_UNIT (f)
1806 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
1807 continue;
1808 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
1809 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
1810 continue;
1812 /* If we matched, then set offset to the displacement into
1813 this field. */
1814 if (base_is_ptr)
1815 new_base = build1 (INDIRECT_REF, record_type, base);
1816 else
1817 new_base = base;
1818 new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);
1820 /* Recurse to possibly find the match. */
1821 ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
1822 f == TYPE_FIELDS (record_type));
1823 if (ret)
1824 return ret;
1825 ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
1826 orig_type, false);
1827 if (ret)
1828 return ret;
1831 if (!tail_array_field)
1832 return NULL_TREE;
1834 f = tail_array_field;
1835 field_type = TREE_TYPE (f);
1836 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
1838 /* If we get here, we've got an aggregate field, and a possibly
1839 nonzero offset into it. Recurse and hope for a valid match. */
1840 if (base_is_ptr)
1841 base = build1 (INDIRECT_REF, record_type, base);
1842 base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1844 t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
1845 f == TYPE_FIELDS (record_type));
1846 if (t)
1847 return t;
1848 return maybe_fold_offset_to_component_ref (field_type, base, offset,
1849 orig_type, false);
1852 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
1853 or BASE[index] or by combination of those.
1855 Before attempting the conversion strip off existing ADDR_EXPRs and
1856 handled component refs. */
1858 tree
1859 maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
1861 tree ret;
1862 tree type;
1863 bool base_is_ptr = true;
1865 STRIP_NOPS (base);
1866 if (TREE_CODE (base) == ADDR_EXPR)
1868 base_is_ptr = false;
1870 base = TREE_OPERAND (base, 0);
1872 /* Handle the case where an existing COMPONENT_REF picks, e.g., the wrong
1873 field of a union, so it needs to be removed and a new COMPONENT_REF
1874 constructed. Such wrong COMPONENT_REFs are often constructed by folding the
1875 (type *)&object within the expression (type *)&object+offset. */
1876 if (handled_component_p (base))
1878 HOST_WIDE_INT sub_offset, size, maxsize;
1879 tree newbase;
1880 newbase = get_ref_base_and_extent (base, &sub_offset,
1881 &size, &maxsize);
1882 gcc_assert (newbase);
1883 if (size == maxsize
1884 && size != -1
1885 && !(sub_offset & (BITS_PER_UNIT - 1)))
1887 base = newbase;
1888 if (sub_offset)
1889 offset = int_const_binop (PLUS_EXPR, offset,
1890 build_int_cst (TREE_TYPE (offset),
1891 sub_offset / BITS_PER_UNIT), 1);
1894 if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
1895 && integer_zerop (offset))
1896 return base;
1897 type = TREE_TYPE (base);
1899 else
1901 base_is_ptr = true;
1902 if (!POINTER_TYPE_P (TREE_TYPE (base)))
1903 return NULL_TREE;
1904 type = TREE_TYPE (TREE_TYPE (base));
1906 ret = maybe_fold_offset_to_component_ref (type, base, offset,
1907 orig_type, base_is_ptr);
1908 if (!ret)
1910 if (base_is_ptr)
1911 base = build1 (INDIRECT_REF, type, base);
1912 ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
1914 return ret;
1917 /* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
1918 or &BASE[index] or by combination of those.
1920 Before attempting the conversion strip off existing component refs. */
1922 tree
1923 maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
1925 tree t;
1927 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
1928 && POINTER_TYPE_P (orig_type));
1930 t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
1931 if (t != NULL_TREE)
1933 tree orig = addr;
1934 tree ptr_type;
1936 /* For __builtin_object_size to function correctly we need to
1937 make sure not to fold address arithmetic in a way that changes the
1938 reference from one array to another. This would happen for
1939 example for
1941 struct X { char s1[10]; char s2[10] } s;
1942 char *foo (void) { return &s.s2[-4]; }
1944 where we need to avoid generating &s.s1[6]. As the C and
1945 C++ frontends create different initial trees
1946 (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
1947 sophisticated comparisons here. Note that checking for the
1948 condition after the fact is easier than trying to avoid doing
1949 the folding. */
1950 STRIP_NOPS (orig);
1951 if (TREE_CODE (orig) == ADDR_EXPR)
1952 orig = TREE_OPERAND (orig, 0);
1953 if ((TREE_CODE (orig) == ARRAY_REF
1954 || (TREE_CODE (orig) == COMPONENT_REF
1955 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
1956 && (TREE_CODE (t) == ARRAY_REF
1957 || (TREE_CODE (t) == COMPONENT_REF
1958 && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1))) == ARRAY_TYPE))
1959 && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
1960 ? TREE_OPERAND (orig, 0) : orig,
1961 TREE_CODE (t) == ARRAY_REF
1962 ? TREE_OPERAND (t, 0) : t, 0))
1963 return NULL_TREE;
1965 ptr_type = build_pointer_type (TREE_TYPE (t));
1966 if (!useless_type_conversion_p (orig_type, ptr_type))
1967 return NULL_TREE;
1968 return build_fold_addr_expr_with_type (t, ptr_type);
1971 return NULL_TREE;
1974 /* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET).
1975 Return the simplified expression, or NULL if nothing could be done. */
1977 static tree
1978 maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
1980 tree t;
1981 bool volatile_p = TREE_THIS_VOLATILE (expr);
1983 /* We may well have constructed a double-nested PLUS_EXPR via multiple
1984 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
1985 are sometimes added. */
1986 base = fold (base);
1987 STRIP_TYPE_NOPS (base);
1988 TREE_OPERAND (expr, 0) = base;
1990 /* One possibility is that the address reduces to a string constant. */
1991 t = fold_read_from_constant_string (expr);
1992 if (t)
1993 return t;
1995 /* Add in any offset from a POINTER_PLUS_EXPR. */
1996 if (TREE_CODE (base) == POINTER_PLUS_EXPR)
1998 tree offset2;
2000 offset2 = TREE_OPERAND (base, 1);
2001 if (TREE_CODE (offset2) != INTEGER_CST)
2002 return NULL_TREE;
2003 base = TREE_OPERAND (base, 0);
2005 offset = fold_convert (sizetype,
2006 int_const_binop (PLUS_EXPR, offset, offset2, 1));
2009 if (TREE_CODE (base) == ADDR_EXPR)
2011 tree base_addr = base;
2013 /* Strip the ADDR_EXPR. */
2014 base = TREE_OPERAND (base, 0);
2016 /* Fold away CONST_DECL to its value, if the type is scalar. */
2017 if (TREE_CODE (base) == CONST_DECL
2018 && is_gimple_min_invariant (DECL_INITIAL (base)))
2019 return DECL_INITIAL (base);
2021 /* Try folding *(&B+O) to B.X. */
2022 t = maybe_fold_offset_to_reference (base_addr, offset,
2023 TREE_TYPE (expr));
2024 if (t)
2026 /* Preserve volatileness of the original expression.
2027 We can end up with a plain decl here which is shared
2028 and we shouldn't mess with its flags. */
2029 if (!SSA_VAR_P (t))
2030 TREE_THIS_VOLATILE (t) = volatile_p;
2031 return t;
2034 else
2036 /* We can get here for out-of-range string constant accesses,
2037 such as "_"[3]. Bail out of the entire substitution search
2038 and arrange for the entire statement to be replaced by a
2039 call to __builtin_trap. In all likelihood this will all be
2040 constant-folded away, but in the meantime we can't leave with
2041 something that get_expr_operands can't understand. */
2043 t = base;
2044 STRIP_NOPS (t);
2045 if (TREE_CODE (t) == ADDR_EXPR
2046 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
2048 /* FIXME: Except that this causes problems elsewhere with dead
2049 code not being deleted, and we die in the rtl expanders
2050 because we failed to remove some ssa_name. In the meantime,
2051 just return zero. */
2052 /* FIXME2: This condition should be signaled by
2053 fold_read_from_constant_string directly, rather than
2054 re-checking for it here. */
2055 return integer_zero_node;
2058 /* Try folding *(B+O) to B->X. Still an improvement. */
2059 if (POINTER_TYPE_P (TREE_TYPE (base)))
2061 t = maybe_fold_offset_to_reference (base, offset,
2062 TREE_TYPE (expr));
2063 if (t)
2064 return t;
2068 /* Otherwise we had an offset that we could not simplify. */
2069 return NULL_TREE;
2073 /* A quaint feature extant in our address arithmetic is that there
2074 can be hidden type changes here. The type of the result need
2075 not be the same as the type of the input pointer.
2077 What we're after here is an expression of the form
2078 (T *)(&array + const)
2079 where array is OP0, const is OP1, RES_TYPE is T and
2080 the cast doesn't actually exist, but is implicit in the
2081 type of the POINTER_PLUS_EXPR. We'd like to turn this into
2082 &array[x]
2083 which may be able to propagate further. */
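/* A minimal sketch (declarations are hypothetical, assuming 4-byte int):

     int a[8];
     ... (int *) (&a p+ 12) ...

   is rewritten as &a[3], which later substitutions may be able to
   propagate further.  */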
2085 tree
2086 maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
2088 tree ptd_type;
2089 tree t;
2091 /* It had better be a constant. */
2092 if (TREE_CODE (op1) != INTEGER_CST)
2093 return NULL_TREE;
2094 /* The first operand should be an ADDR_EXPR. */
2095 if (TREE_CODE (op0) != ADDR_EXPR)
2096 return NULL_TREE;
2097 op0 = TREE_OPERAND (op0, 0);
2099 /* If the first operand is an ARRAY_REF, expand it so that we can fold
2100 the offset into it. */
2101 while (TREE_CODE (op0) == ARRAY_REF)
2103 tree array_obj = TREE_OPERAND (op0, 0);
2104 tree array_idx = TREE_OPERAND (op0, 1);
2105 tree elt_type = TREE_TYPE (op0);
2106 tree elt_size = TYPE_SIZE_UNIT (elt_type);
2107 tree min_idx;
2109 if (TREE_CODE (array_idx) != INTEGER_CST)
2110 break;
2111 if (TREE_CODE (elt_size) != INTEGER_CST)
2112 break;
2114 /* Un-bias the index by the min index of the array type. */
2115 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
2116 if (min_idx)
2118 min_idx = TYPE_MIN_VALUE (min_idx);
2119 if (min_idx)
2121 if (TREE_CODE (min_idx) != INTEGER_CST)
2122 break;
2124 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
2125 if (!integer_zerop (min_idx))
2126 array_idx = int_const_binop (MINUS_EXPR, array_idx,
2127 min_idx, 0);
2131 /* Convert the index to a byte offset. */
2132 array_idx = fold_convert (sizetype, array_idx);
2133 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
2135 /* Update the operands for the next round, or for folding. */
2136 op1 = int_const_binop (PLUS_EXPR,
2137 array_idx, op1, 0);
2138 op0 = array_obj;
2141 ptd_type = TREE_TYPE (res_type);
2142 /* If we want a pointer to void, reconstruct the reference from the
2143 array element type. A pointer to that can be trivially converted
2144 to void *. This happens as we fold (void *)(ptr p+ off). */
2145 if (VOID_TYPE_P (ptd_type)
2146 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
2147 ptd_type = TREE_TYPE (TREE_TYPE (op0));
2149 /* At which point we can try some of the same things as for indirects. */
2150 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
2151 if (!t)
2152 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
2153 ptd_type, false);
2154 if (t)
2155 t = build1 (ADDR_EXPR, res_type, t);
2157 return t;
2160 /* For passing state through walk_tree into fold_stmt_r and its
2161 children. */
2163 struct fold_stmt_r_data
2165 gimple stmt;
2166 bool *changed_p;
2167 bool *inside_addr_expr_p;
2170 /* Subroutine of fold_stmt called via walk_tree. We perform several
2171 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
2173 static tree
2174 fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
2176 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2177 struct fold_stmt_r_data *fold_stmt_r_data;
2178 bool *inside_addr_expr_p;
2179 bool *changed_p;
2180 tree expr = *expr_p, t;
2181 bool volatile_p = TREE_THIS_VOLATILE (expr);
2183 fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
2184 inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
2185 changed_p = fold_stmt_r_data->changed_p;
2187 /* ??? It'd be nice if walk_tree had a pre-order option. */
2188 switch (TREE_CODE (expr))
2190 case INDIRECT_REF:
2191 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2192 if (t)
2193 return t;
2194 *walk_subtrees = 0;
2196 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
2197 integer_zero_node);
2198 /* Avoid folding *"abc" = 5 into 'a' = 5. */
2199 if (wi->is_lhs && t && TREE_CODE (t) == INTEGER_CST)
2200 t = NULL_TREE;
2201 if (!t
2202 && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2203 /* If we had a good reason for propagating the address here,
2204 make sure we end up with valid gimple. See PR34989. */
2205 t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
2206 break;
2208 case NOP_EXPR:
2209 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2210 if (t)
2211 return t;
2212 *walk_subtrees = 0;
2214 if (POINTER_TYPE_P (TREE_TYPE (expr))
2215 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
2216 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2217 && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
2218 integer_zero_node,
2219 TREE_TYPE (TREE_TYPE (expr)))))
2220 return t;
2221 break;
2223 /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
2224 We'd only want to bother decomposing an existing ARRAY_REF if
2225 the base array is found to have another offset contained within.
2226 Otherwise we'd be wasting time. */
2227 case ARRAY_REF:
2228 /* If we are not processing expressions found within an
2229 ADDR_EXPR, then we can fold constant array references.
2230 Don't fold on LHS either, to avoid folding "abc"[0] = 5
2231 into 'a' = 5. */
2232 if (!*inside_addr_expr_p && !wi->is_lhs)
2233 t = fold_read_from_constant_string (expr);
2234 else
2235 t = NULL;
2236 break;
2238 case ADDR_EXPR:
2239 *inside_addr_expr_p = true;
2240 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2241 *inside_addr_expr_p = false;
2242 if (t)
2243 return t;
2244 *walk_subtrees = 0;
2246 /* Make sure the value is properly considered constant, and so gets
2247 propagated as expected. */
2248 if (*changed_p)
2249 recompute_tree_invariant_for_addr_expr (expr);
2250 return NULL_TREE;
2252 case COMPONENT_REF:
2253 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2254 if (t)
2255 return t;
2256 *walk_subtrees = 0;
2258 /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
2259 We've already checked that the records are compatible, so we should
2260 come up with a set of compatible fields. */
2262 tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
2263 tree expr_field = TREE_OPERAND (expr, 1);
2265 if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
2267 expr_field = find_compatible_field (expr_record, expr_field);
2268 TREE_OPERAND (expr, 1) = expr_field;
2271 break;
2273 case TARGET_MEM_REF:
2274 t = maybe_fold_tmr (expr);
2275 break;
2277 case POINTER_PLUS_EXPR:
2278 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2279 if (t)
2280 return t;
2281 t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
2282 if (t)
2283 return t;
2284 *walk_subtrees = 0;
2286 t = maybe_fold_stmt_addition (TREE_TYPE (expr),
2287 TREE_OPERAND (expr, 0),
2288 TREE_OPERAND (expr, 1));
2289 break;
2291 case COND_EXPR:
2292 if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
2294 tree op0 = TREE_OPERAND (expr, 0);
2295 tree tem;
2296 bool set;
2298 fold_defer_overflow_warnings ();
2299 tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
2300 TREE_OPERAND (op0, 0),
2301 TREE_OPERAND (op0, 1));
2302 /* This is actually a conditional expression, not a GIMPLE
2303 conditional statement; however, the valid_gimple_rhs_p
2304 test still applies. */
2305 set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
2306 fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
2307 if (set)
2309 COND_EXPR_COND (expr) = tem;
2310 t = expr;
2311 break;
2314 return NULL_TREE;
2316 default:
2317 return NULL_TREE;
2320 if (t)
2322 /* Preserve volatileness of the original expression.
2323 We can end up with a plain decl here which is shared
2324 and we shouldn't mess with its flags. */
2325 if (!SSA_VAR_P (t))
2326 TREE_THIS_VOLATILE (t) = volatile_p;
2327 *expr_p = t;
2328 *changed_p = true;
2331 return NULL_TREE;
2334 /* Return the string length, maximum string length or maximum value of
2335 ARG in *LENGTH.
2336 If ARG is an SSA name variable, follow its use-def chains. Return false
2337 if *LENGTH is already set and, for TYPE == 0, its value is not equal to
2338 the length we determine, or if we are unable to determine the length or
2339 value at all. VISITED is a bitmap of visited variables.
2340 TYPE is 0 if string length should be returned, 1 for maximum string
2341 length and 2 for maximum value ARG can have. */
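/* A minimal sketch of what this enables (hypothetical GIMPLE):

     # s_1 = PHI <&"foo"[0](3), &"bar"[0](4)>
     n_2 = strlen (s_1);

   Every PHI argument has string length 3, so the strlen call can later be
   replaced by the constant 3 in ccp_fold_builtin below.  */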
2343 static bool
2344 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
2346 tree var, val;
2347 gimple def_stmt;
2349 if (TREE_CODE (arg) != SSA_NAME)
2351 if (TREE_CODE (arg) == COND_EXPR)
2352 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
2353 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
2354 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
2355 else if (TREE_CODE (arg) == ADDR_EXPR
2356 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
2357 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
2359 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
2360 if (TREE_CODE (aop0) == INDIRECT_REF
2361 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
2362 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
2363 length, visited, type);
2366 if (type == 2)
2368 val = arg;
2369 if (TREE_CODE (val) != INTEGER_CST
2370 || tree_int_cst_sgn (val) < 0)
2371 return false;
2373 else
2374 val = c_strlen (arg, 1);
2375 if (!val)
2376 return false;
2378 if (*length)
2380 if (type > 0)
2382 if (TREE_CODE (*length) != INTEGER_CST
2383 || TREE_CODE (val) != INTEGER_CST)
2384 return false;
2386 if (tree_int_cst_lt (*length, val))
2387 *length = val;
2388 return true;
2390 else if (simple_cst_equal (val, *length) != 1)
2391 return false;
2394 *length = val;
2395 return true;
2398 /* If we were already here, break the infinite cycle. */
2399 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2400 return true;
2401 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2403 var = arg;
2404 def_stmt = SSA_NAME_DEF_STMT (var);
2406 switch (gimple_code (def_stmt))
2408 case GIMPLE_ASSIGN:
2409 /* The RHS of the statement defining VAR must either have a
2410 constant length or come from another SSA_NAME with a constant
2411 length. */
2412 if (gimple_assign_single_p (def_stmt)
2413 || gimple_assign_unary_nop_p (def_stmt))
2415 tree rhs = gimple_assign_rhs1 (def_stmt);
2416 return get_maxval_strlen (rhs, length, visited, type);
2418 return false;
2420 case GIMPLE_PHI:
2422 /* All the arguments of the PHI node must have the same constant
2423 length. */
2424 unsigned i;
2426 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
2428 tree arg = gimple_phi_arg (def_stmt, i)->def;
2430 /* If this PHI has itself as an argument, we cannot
2431 determine the string length of this argument. However,
2432 if we can find a constant string length for the other
2433 PHI args then we can still be sure that this is a
2434 constant string length. So be optimistic and just
2435 continue with the next argument. */
2436 if (arg == gimple_phi_result (def_stmt))
2437 continue;
2439 if (!get_maxval_strlen (arg, length, visited, type))
2440 return false;
2443 return true;
2445 default:
2446 return false;
2451 /* Fold builtin call in statement STMT. Returns a simplified tree.
2452 We may return a non-constant expression, including another call
2453 to a different function and with different arguments, e.g.,
2454 substituting memcpy for strcpy when the string length is known.
2455 Note that some builtins expand into inline code that may not
2456 be valid in GIMPLE. Callers must take care. */
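/* A minimal sketch of such a substitution (hypothetical source):

     strcpy (dst, "abcd");

   with the source length known to be 4 may become

     memcpy (dst, "abcd", 5);

   copying the terminating NUL explicitly.  */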
2458 static tree
2459 ccp_fold_builtin (gimple stmt)
2461 tree result, val[3];
2462 tree callee, a;
2463 int arg_idx, type;
2464 bitmap visited;
2465 bool ignore;
2466 int nargs;
2468 gcc_assert (is_gimple_call (stmt));
2470 ignore = (gimple_call_lhs (stmt) == NULL);
2472 /* First try the generic builtin folder. If that succeeds, return the
2473 result directly. */
2474 result = fold_call_stmt (stmt, ignore);
2475 if (result)
2477 if (ignore)
2478 STRIP_NOPS (result);
2479 return result;
2482 /* Ignore MD builtins. */
2483 callee = gimple_call_fndecl (stmt);
2484 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2485 return NULL_TREE;
2487 /* If the builtin could not be folded, and it has no argument list,
2488 we're done. */
2489 nargs = gimple_call_num_args (stmt);
2490 if (nargs == 0)
2491 return NULL_TREE;
2493 /* Limit the work to builtins we know how to simplify. */
2494 switch (DECL_FUNCTION_CODE (callee))
2496 case BUILT_IN_STRLEN:
2497 case BUILT_IN_FPUTS:
2498 case BUILT_IN_FPUTS_UNLOCKED:
2499 arg_idx = 0;
2500 type = 0;
2501 break;
2502 case BUILT_IN_STRCPY:
2503 case BUILT_IN_STRNCPY:
2504 arg_idx = 1;
2505 type = 0;
2506 break;
2507 case BUILT_IN_MEMCPY_CHK:
2508 case BUILT_IN_MEMPCPY_CHK:
2509 case BUILT_IN_MEMMOVE_CHK:
2510 case BUILT_IN_MEMSET_CHK:
2511 case BUILT_IN_STRNCPY_CHK:
2512 arg_idx = 2;
2513 type = 2;
2514 break;
2515 case BUILT_IN_STRCPY_CHK:
2516 case BUILT_IN_STPCPY_CHK:
2517 arg_idx = 1;
2518 type = 1;
2519 break;
2520 case BUILT_IN_SNPRINTF_CHK:
2521 case BUILT_IN_VSNPRINTF_CHK:
2522 arg_idx = 1;
2523 type = 2;
2524 break;
2525 default:
2526 return NULL_TREE;
2529 if (arg_idx >= nargs)
2530 return NULL_TREE;
2532 /* Try to use the dataflow information gathered by the CCP process. */
2533 visited = BITMAP_ALLOC (NULL);
2534 bitmap_clear (visited);
2536 memset (val, 0, sizeof (val));
2537 a = gimple_call_arg (stmt, arg_idx);
2538 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
2539 val[arg_idx] = NULL_TREE;
2541 BITMAP_FREE (visited);
2543 result = NULL_TREE;
2544 switch (DECL_FUNCTION_CODE (callee))
2546 case BUILT_IN_STRLEN:
2547 if (val[0] && nargs == 1)
2549 tree new_val =
2550 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
2552 /* If the result is not a valid gimple value, or not a cast
2553 of a valid gimple value, then we cannot use the result. */
2554 if (is_gimple_val (new_val)
2555 || (is_gimple_cast (new_val)
2556 && is_gimple_val (TREE_OPERAND (new_val, 0))))
2557 return new_val;
2559 break;
2561 case BUILT_IN_STRCPY:
2562 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
2563 result = fold_builtin_strcpy (callee,
2564 gimple_call_arg (stmt, 0),
2565 gimple_call_arg (stmt, 1),
2566 val[1]);
2567 break;
2569 case BUILT_IN_STRNCPY:
2570 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2571 result = fold_builtin_strncpy (callee,
2572 gimple_call_arg (stmt, 0),
2573 gimple_call_arg (stmt, 1),
2574 gimple_call_arg (stmt, 2),
2575 val[1]);
2576 break;
2578 case BUILT_IN_FPUTS:
2579 if (nargs == 2)
2580 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2581 gimple_call_arg (stmt, 1),
2582 ignore, false, val[0]);
2583 break;
2585 case BUILT_IN_FPUTS_UNLOCKED:
2586 if (nargs == 2)
2587 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2588 gimple_call_arg (stmt, 1),
2589 ignore, true, val[0]);
2590 break;
2592 case BUILT_IN_MEMCPY_CHK:
2593 case BUILT_IN_MEMPCPY_CHK:
2594 case BUILT_IN_MEMMOVE_CHK:
2595 case BUILT_IN_MEMSET_CHK:
2596 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2597 result = fold_builtin_memory_chk (callee,
2598 gimple_call_arg (stmt, 0),
2599 gimple_call_arg (stmt, 1),
2600 gimple_call_arg (stmt, 2),
2601 gimple_call_arg (stmt, 3),
2602 val[2], ignore,
2603 DECL_FUNCTION_CODE (callee));
2604 break;
2606 case BUILT_IN_STRCPY_CHK:
2607 case BUILT_IN_STPCPY_CHK:
2608 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2609 result = fold_builtin_stxcpy_chk (callee,
2610 gimple_call_arg (stmt, 0),
2611 gimple_call_arg (stmt, 1),
2612 gimple_call_arg (stmt, 2),
2613 val[1], ignore,
2614 DECL_FUNCTION_CODE (callee));
2615 break;
2617 case BUILT_IN_STRNCPY_CHK:
2618 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2619 result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
2620 gimple_call_arg (stmt, 1),
2621 gimple_call_arg (stmt, 2),
2622 gimple_call_arg (stmt, 3),
2623 val[2]);
2624 break;
2626 case BUILT_IN_SNPRINTF_CHK:
2627 case BUILT_IN_VSNPRINTF_CHK:
2628 if (val[1] && is_gimple_val (val[1]))
2629 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
2630 DECL_FUNCTION_CODE (callee));
2631 break;
2633 default:
2634 gcc_unreachable ();
2637 if (result && ignore)
2638 result = fold_ignored_result (result);
2639 return result;
2642 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
2643 replacement rhs for the statement or NULL_TREE if no simplification
2644 could be made. It is assumed that the operands have been previously
2645 folded. */
2647 static tree
2648 fold_gimple_assign (gimple_stmt_iterator *si)
2650 gimple stmt = gsi_stmt (*si);
2651 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2653 tree result = NULL;
2655 switch (get_gimple_rhs_class (subcode))
2657 case GIMPLE_SINGLE_RHS:
2659 tree rhs = gimple_assign_rhs1 (stmt);
2661 /* Try to fold a conditional expression. */
2662 if (TREE_CODE (rhs) == COND_EXPR)
2664 tree temp = fold (COND_EXPR_COND (rhs));
2665 if (temp != COND_EXPR_COND (rhs))
2666 result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
2667 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
2670 /* If we couldn't fold the RHS, hand over to the generic
2671 fold routines. */
2672 if (result == NULL_TREE)
2673 result = fold (rhs);
2675 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
2676 that may have been added by fold, and "useless" type
2677 conversions that might now be apparent due to propagation. */
2678 STRIP_USELESS_TYPE_CONVERSION (result);
2680 if (result != rhs && valid_gimple_rhs_p (result))
2681 return result;
2682 else
2683 /* It is possible that fold_stmt_r simplified the RHS.
2684 Make sure that the subcode of this statement still
2685 reflects the principal operator of the rhs operand. */
2686 return rhs;
2688 break;
2690 case GIMPLE_UNARY_RHS:
2692 tree rhs = gimple_assign_rhs1 (stmt);
2694 result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
2695 if (result)
2697 /* If the operation was a conversion do _not_ mark a
2698 resulting constant with TREE_OVERFLOW if the original
2699 constant was not. These conversions have implementation
2700 defined behavior and retaining the TREE_OVERFLOW flag
2701 here would confuse later passes such as VRP. */
2702 if (CONVERT_EXPR_CODE_P (subcode)
2703 && TREE_CODE (result) == INTEGER_CST
2704 && TREE_CODE (rhs) == INTEGER_CST)
2705 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
2707 STRIP_USELESS_TYPE_CONVERSION (result);
2708 if (valid_gimple_rhs_p (result))
2709 return result;
2711 else if (CONVERT_EXPR_CODE_P (subcode)
2712 && POINTER_TYPE_P (gimple_expr_type (stmt))
2713 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
2715 tree type = gimple_expr_type (stmt);
2716 tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
2717 integer_zero_node, type);
2718 if (t)
2719 return t;
2722 break;
2724 case GIMPLE_BINARY_RHS:
2725 /* Try to fold pointer addition. */
2726 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2728 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2729 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
2731 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
2732 if (!useless_type_conversion_p
2733 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
2734 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2736 result = maybe_fold_stmt_addition (type,
2737 gimple_assign_rhs1 (stmt),
2738 gimple_assign_rhs2 (stmt));
2741 if (!result)
2742 result = fold_binary (subcode,
2743 TREE_TYPE (gimple_assign_lhs (stmt)),
2744 gimple_assign_rhs1 (stmt),
2745 gimple_assign_rhs2 (stmt));
2747 if (result)
2749 STRIP_USELESS_TYPE_CONVERSION (result);
2750 if (valid_gimple_rhs_p (result))
2751 return result;
2753 /* Fold might have produced non-GIMPLE, so if we trust it blindly
2754 we lose canonicalization opportunities. Do not go through
2755 fold again here, though, or the same non-GIMPLE will be
2756 produced. */
2757 if (commutative_tree_code (subcode)
2758 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
2759 gimple_assign_rhs2 (stmt), false))
2760 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
2761 gimple_assign_rhs2 (stmt),
2762 gimple_assign_rhs1 (stmt));
2764 break;
2766 case GIMPLE_INVALID_RHS:
2767 gcc_unreachable ();
2770 return NULL_TREE;
2773 /* Attempt to fold a conditional statement. Return true if any changes were
2774 made. We only attempt to fold the condition expression, and do not perform
2775 any transformation that would require alteration of the cfg. It is
2776 assumed that the operands have been previously folded. */
2778 static bool
2779 fold_gimple_cond (gimple stmt)
2781 tree result = fold_binary (gimple_cond_code (stmt),
2782 boolean_type_node,
2783 gimple_cond_lhs (stmt),
2784 gimple_cond_rhs (stmt));
2786 if (result)
2788 STRIP_USELESS_TYPE_CONVERSION (result);
2789 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
2791 gimple_cond_set_condition_from_tree (stmt, result);
2792 return true;
2796 return false;
2800 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2801 The statement may be replaced by another statement, e.g., if the call
2802 simplifies to a constant value. Return true if any changes were made.
2803 It is assumed that the operands have been previously folded. */
2805 static bool
2806 fold_gimple_call (gimple_stmt_iterator *gsi)
2808 gimple stmt = gsi_stmt (*gsi);
2810 tree callee = gimple_call_fndecl (stmt);
2812 /* Check for builtins that CCP can handle using information not
2813 available in the generic fold routines. */
2814 if (callee && DECL_BUILT_IN (callee))
2816 tree result = ccp_fold_builtin (stmt);
2818 if (result)
2819 return update_call_from_tree (gsi, result);
2821 else
2823 /* Check for resolvable OBJ_TYPE_REF. The only cases we can resolve
2824 here are those where we've propagated the address of a decl into the
2825 object slot. */
2826 /* ??? Should perhaps do this in fold proper. However, doing it
2827 there requires that we create a new CALL_EXPR, and that requires
2828 copying EH region info to the new node. Easier to just do it
2829 here where we can just smash the call operand. */
2830 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
2831 callee = gimple_call_fn (stmt);
2832 if (TREE_CODE (callee) == OBJ_TYPE_REF
2833 && lang_hooks.fold_obj_type_ref
2834 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2835 && DECL_P (TREE_OPERAND
2836 (OBJ_TYPE_REF_OBJECT (callee), 0)))
2838 tree t;
2840 /* ??? Caution: Broken ADDR_EXPR semantics means that
2841 looking at the type of the operand of the addr_expr
2842 can yield an array type. See silly exception in
2843 check_pointer_types_r. */
2844 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2845 t = lang_hooks.fold_obj_type_ref (callee, t);
2846 if (t)
2848 gimple_call_set_fn (stmt, t);
2849 return true;
2854 return false;
2857 /* Fold the statement pointed to by GSI. In some cases, this function may
2858 replace the whole statement with a new one. Returns true iff folding
2859 makes any changes. */
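/* A minimal sketch (hypothetical GIMPLE): once constant propagation has
   replaced p_1 by &a in

     x_2 = *p_1;

   the resulting x_2 = *&a is folded by the operand walk below into the
   plain x_2 = a.  */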
2861 bool
2862 fold_stmt (gimple_stmt_iterator *gsi)
2864 tree res;
2865 struct fold_stmt_r_data fold_stmt_r_data;
2866 struct walk_stmt_info wi;
2868 bool changed = false;
2869 bool inside_addr_expr = false;
2871 gimple stmt = gsi_stmt (*gsi);
2873 fold_stmt_r_data.stmt = stmt;
2874 fold_stmt_r_data.changed_p = &changed;
2875 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
2877 memset (&wi, 0, sizeof (wi));
2878 wi.info = &fold_stmt_r_data;
2880 /* Fold the individual operands.
2881 For example, fold instances of *&VAR into VAR, etc. */
2882 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2883 gcc_assert (!res);
2885 /* Fold the main computation performed by the statement. */
2886 switch (gimple_code (stmt))
2888 case GIMPLE_ASSIGN:
2890 tree new_rhs = fold_gimple_assign (gsi);
2891 if (new_rhs != NULL_TREE)
2893 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
2894 changed = true;
2896 stmt = gsi_stmt (*gsi);
2897 break;
2899 case GIMPLE_COND:
2900 changed |= fold_gimple_cond (stmt);
2901 break;
2902 case GIMPLE_CALL:
2903 /* The entire statement may be replaced in this case. */
2904 changed |= fold_gimple_call (gsi);
2905 break;
2907 default:
2908 return changed;
2909 break;
2912 return changed;
2915 /* Perform the minimal folding on statement STMT. Only operations like
2916 *&x created by constant propagation are handled. The statement cannot
2917 be replaced with a new one. Return true if the statement was
2918 changed, false otherwise. */
2920 bool
2921 fold_stmt_inplace (gimple stmt)
2923 tree res;
2924 struct fold_stmt_r_data fold_stmt_r_data;
2925 struct walk_stmt_info wi;
2926 gimple_stmt_iterator si;
2928 bool changed = false;
2929 bool inside_addr_expr = false;
2931 fold_stmt_r_data.stmt = stmt;
2932 fold_stmt_r_data.changed_p = &changed;
2933 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
2935 memset (&wi, 0, sizeof (wi));
2936 wi.info = &fold_stmt_r_data;
2938 /* Fold the individual operands.
2939 For example, fold instances of *&VAR into VAR, etc.
2941 It appears that, at one time, maybe_fold_stmt_indirect
2942 would cause the walk to return non-null in order to
2943 signal that the entire statement should be replaced with
2944 a call to __builtin_trap. This functionality is currently
2945 disabled, as noted in a FIXME, and cannot be supported here. */
2946 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2947 gcc_assert (!res);
2949 /* Fold the main computation performed by the statement. */
2950 switch (gimple_code (stmt))
2952 case GIMPLE_ASSIGN:
2954 unsigned old_num_ops;
2955 tree new_rhs;
2956 old_num_ops = gimple_num_ops (stmt);
2957 si = gsi_for_stmt (stmt);
2958 new_rhs = fold_gimple_assign (&si);
2959 if (new_rhs != NULL_TREE
2960 && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
2962 gimple_assign_set_rhs_from_tree (&si, new_rhs);
2963 changed = true;
2965 gcc_assert (gsi_stmt (si) == stmt);
2966 break;
2968 case GIMPLE_COND:
2969 changed |= fold_gimple_cond (stmt);
2970 break;
2972 default:
2973 break;
2976 return changed;
2979 /* Try to optimize out __builtin_stack_restore. Optimize it out
2980 if there is another __builtin_stack_restore in the same basic
2981 block and no calls or ASM_EXPRs are in between, or if this block's
2982 only outgoing edge is to EXIT_BLOCK and there are no calls or
2983 ASM_EXPRs after this __builtin_stack_restore. */
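/* A minimal sketch (hypothetical GIMPLE):

     p_1 = __builtin_stack_save ();
     ...                                 <- no calls or ASM_EXPRs
     __builtin_stack_restore (p_1);      <- candidate
     ...                                 <- no calls or ASM_EXPRs
     __builtin_stack_restore (p_2);

   The candidate restore has no observable effect because the later
   restore (or, alternatively, falling through to the function's exit)
   already undoes any intervening allocation, so the save/restore pair
   can be removed.  */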
2985 static tree
2986 optimize_stack_restore (gimple_stmt_iterator i)
2988 tree callee, rhs;
2989 gimple stmt, stack_save;
2990 gimple_stmt_iterator stack_save_gsi;
2992 basic_block bb = gsi_bb (i);
2993 gimple call = gsi_stmt (i);
2995 if (gimple_code (call) != GIMPLE_CALL
2996 || gimple_call_num_args (call) != 1
2997 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2998 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
2999 return NULL_TREE;
3001 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
3003 stmt = gsi_stmt (i);
3004 if (gimple_code (stmt) == GIMPLE_ASM)
3005 return NULL_TREE;
3006 if (gimple_code (stmt) != GIMPLE_CALL)
3007 continue;
3009 callee = gimple_call_fndecl (stmt);
3010 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3011 return NULL_TREE;
3013 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
3014 break;
3017 if (gsi_end_p (i)
3018 && (! single_succ_p (bb)
3019 || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
3020 return NULL_TREE;
3022 stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
3023 if (gimple_code (stack_save) != GIMPLE_CALL
3024 || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
3025 || stmt_could_throw_p (stack_save)
3026 || !has_single_use (gimple_call_arg (call, 0)))
3027 return NULL_TREE;
3029 callee = gimple_call_fndecl (stack_save);
3030 if (!callee
3031 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3032 || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
3033 || gimple_call_num_args (stack_save) != 0)
3034 return NULL_TREE;
3036 stack_save_gsi = gsi_for_stmt (stack_save);
3037 push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3038 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
3039 if (!update_call_from_tree (&stack_save_gsi, rhs))
3041 discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3042 return NULL_TREE;
3044 pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3046 /* No effect, so the statement will be deleted. */
3047 return integer_zero_node;
3050 /* If va_list type is a simple pointer and nothing special is needed,
3051 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
3052 optimize __builtin_va_end (&ap) away as a NOP, and turn __builtin_va_copy into a simple
3053 pointer assignment. */
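/* A minimal sketch (hypothetical source, on a target whose va_list is a
   plain character pointer):

     va_start (ap, last);   becomes   ap = __builtin_next_arg (0);
     va_copy (dst, src);    becomes   dst = src;
     va_end (ap);           becomes   nothing at all.  */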
3055 static tree
3056 optimize_stdarg_builtin (gimple call)
3058 tree callee, lhs, rhs, cfun_va_list;
3059 bool va_list_simple_ptr;
3061 if (gimple_code (call) != GIMPLE_CALL)
3062 return NULL_TREE;
3064 callee = gimple_call_fndecl (call);
3066 cfun_va_list = targetm.fn_abi_va_list (callee);
3067 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
3068 && (TREE_TYPE (cfun_va_list) == void_type_node
3069 || TREE_TYPE (cfun_va_list) == char_type_node);
3071 switch (DECL_FUNCTION_CODE (callee))
3073 case BUILT_IN_VA_START:
3074 if (!va_list_simple_ptr
3075 || targetm.expand_builtin_va_start != NULL
3076 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
3077 return NULL_TREE;
3079 if (gimple_call_num_args (call) != 2)
3080 return NULL_TREE;
3082 lhs = gimple_call_arg (call, 0);
3083 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3084 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3085 != TYPE_MAIN_VARIANT (cfun_va_list))
3086 return NULL_TREE;
3088 lhs = build_fold_indirect_ref (lhs);
3089 rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
3090 1, integer_zero_node);
3091 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3092 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3094 case BUILT_IN_VA_COPY:
3095 if (!va_list_simple_ptr)
3096 return NULL_TREE;
3098 if (gimple_call_num_args (call) != 2)
3099 return NULL_TREE;
3101 lhs = gimple_call_arg (call, 0);
3102 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3103 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3104 != TYPE_MAIN_VARIANT (cfun_va_list))
3105 return NULL_TREE;
3107 lhs = build_fold_indirect_ref (lhs);
3108 rhs = gimple_call_arg (call, 1);
3109 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
3110 != TYPE_MAIN_VARIANT (cfun_va_list))
3111 return NULL_TREE;
3113 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3114 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3116 case BUILT_IN_VA_END:
3117 /* No effect, so the statement will be deleted. */
3118 return integer_zero_node;
3120 default:
3121 gcc_unreachable ();
3125 /* Convert EXPR into a GIMPLE value suitable for substitution on the
3126 RHS of an assignment. Insert the necessary statements before
3127 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
3128 is replaced. If the call is expected to produce a result, then it
3129 is replaced by an assignment of the new RHS to the result variable.
3130 If the result is to be ignored, then the call is replaced by a
3131 GIMPLE_NOP. */
3133 static void
3134 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
3136 tree lhs;
3137 tree tmp = NULL_TREE; /* Silence warning. */
3138 gimple stmt, new_stmt;
3139 gimple_stmt_iterator i;
3140 gimple_seq stmts = gimple_seq_alloc();
3141 struct gimplify_ctx gctx;
3143 stmt = gsi_stmt (*si_p);
3145 gcc_assert (is_gimple_call (stmt));
3147 lhs = gimple_call_lhs (stmt);
3149 push_gimplify_context (&gctx);
3151 if (lhs == NULL_TREE)
3152 gimplify_and_add (expr, &stmts);
3153 else
3154 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
3156 pop_gimplify_context (NULL);
3158 if (gimple_has_location (stmt))
3159 annotate_all_with_location (stmts, gimple_location (stmt));
3161 /* The replacement can expose previously unreferenced variables. */
3162 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
3164 new_stmt = gsi_stmt (i);
3165 find_new_referenced_vars (new_stmt);
3166 gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
3167 mark_symbols_for_renaming (new_stmt);
3168 gsi_next (si_p);
3171 if (lhs == NULL_TREE)
3172 new_stmt = gimple_build_nop ();
3173 else
3175 new_stmt = gimple_build_assign (lhs, tmp);
3176 copy_virtual_operands (new_stmt, stmt);
3177 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
3180 gimple_set_location (new_stmt, gimple_location (stmt));
3181 gsi_replace (si_p, new_stmt, false);
3184 /* A simple pass that attempts to fold all builtin functions. This pass
3185 is run after we've propagated as many constants as we can. */
3187 static unsigned int
3188 execute_fold_all_builtins (void)
3190 bool cfg_changed = false;
3191 basic_block bb;
3192 unsigned int todoflags = 0;
3194 FOR_EACH_BB (bb)
3196 gimple_stmt_iterator i;
3197 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
3199 gimple stmt, old_stmt;
3200 tree callee, result;
3201 enum built_in_function fcode;
3203 stmt = gsi_stmt (i);
3205 if (gimple_code (stmt) != GIMPLE_CALL)
3207 gsi_next (&i);
3208 continue;
3210 callee = gimple_call_fndecl (stmt);
3211 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3213 gsi_next (&i);
3214 continue;
3216 fcode = DECL_FUNCTION_CODE (callee);
3218 result = ccp_fold_builtin (stmt);
3220 if (result)
3221 gimple_remove_stmt_histograms (cfun, stmt);
3223 if (!result)
3224 switch (DECL_FUNCTION_CODE (callee))
3226 case BUILT_IN_CONSTANT_P:
3227 /* Resolve __builtin_constant_p. If it hasn't been
3228 folded to integer_one_node by now, it's fairly
3229 certain that the value simply isn't constant. */
3230 result = integer_zero_node;
3231 break;
3233 case BUILT_IN_STACK_RESTORE:
3234 result = optimize_stack_restore (i);
3235 if (result)
3236 break;
3237 gsi_next (&i);
3238 continue;
3240 case BUILT_IN_VA_START:
3241 case BUILT_IN_VA_END:
3242 case BUILT_IN_VA_COPY:
3243 /* These shouldn't be folded before pass_stdarg. */
3244 result = optimize_stdarg_builtin (stmt);
3245 if (result)
3246 break;
3247 /* FALLTHRU */
3249 default:
3250 gsi_next (&i);
3251 continue;
3254 if (dump_file && (dump_flags & TDF_DETAILS))
3256 fprintf (dump_file, "Simplified\n ");
3257 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3260 old_stmt = stmt;
3261 push_stmt_changes (gsi_stmt_ptr (&i));
3263 if (!update_call_from_tree (&i, result))
3265 gimplify_and_update_call_from_tree (&i, result);
3266 todoflags |= TODO_rebuild_alias;
3269 stmt = gsi_stmt (i);
3270 pop_stmt_changes (gsi_stmt_ptr (&i));
3272 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3273 && gimple_purge_dead_eh_edges (bb))
3274 cfg_changed = true;
3276 if (dump_file && (dump_flags & TDF_DETAILS))
3278 fprintf (dump_file, "to\n ");
3279 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3280 fprintf (dump_file, "\n");
3283 /* Retry the same statement if it changed into another
3284 builtin; there might be new opportunities now. */
3285 if (gimple_code (stmt) != GIMPLE_CALL)
3287 gsi_next (&i);
3288 continue;
3290 callee = gimple_call_fndecl (stmt);
3291 if (!callee
3292 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3293 || DECL_FUNCTION_CODE (callee) == fcode)
3294 gsi_next (&i);
3298 /* Delete unreachable blocks. */
3299 if (cfg_changed)
3300 todoflags |= TODO_cleanup_cfg;
3302 return todoflags;
3306 struct gimple_opt_pass pass_fold_builtins =
3309 GIMPLE_PASS,
3310 "fab", /* name */
3311 NULL, /* gate */
3312 execute_fold_all_builtins, /* execute */
3313 NULL, /* sub */
3314 NULL, /* next */
3315 0, /* static_pass_number */
3316 0, /* tv_id */
3317 PROP_cfg | PROP_ssa, /* properties_required */
3318 0, /* properties_provided */
3319 0, /* properties_destroyed */
3320 0, /* todo_flags_start */
3321 TODO_dump_func
3322 | TODO_verify_ssa
3323 | TODO_update_ssa /* todo_flags_finish */