Fix for PR39557
[official-gcc.git] / gcc / tree-ssa-ccp.c
1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
12 later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Conditional constant propagation (CCP) is based on the SSA
24 propagation engine (tree-ssa-propagate.c). Constant assignments of
25 the form VAR = CST are propagated from the assignments into uses of
26 VAR, which in turn may generate new constants. The simulation uses
27 a four level lattice to keep track of constant values associated
28 with SSA names. Given an SSA name V_i, it may take one of the
29 following values:
31 UNINITIALIZED -> the initial state of the value. This value
32 is replaced with a correct initial value
33 the first time the value is used, so the
34 rest of the pass does not need to care about
35 it. Using this value simplifies initialization
36 of the pass, and prevents us from needlessly
37 scanning statements that are never reached.
39 UNDEFINED -> V_i is a local variable whose definition
40 has not been processed yet. Therefore we
41 don't yet know if its value is a constant
42 or not.
44 CONSTANT -> V_i has been found to hold a constant
45 value C.
47 VARYING -> V_i cannot take a constant value, or if it
48 does, it is not possible to determine it
49 at compile time.
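   For illustration (a hypothetical fragment, not taken from any test
   case), given

	x_1 = 4;
	y_2 = x_1 + 1;
	z_3 = foo ();

   x_1 and y_2 end up CONSTANT (4 and 5 respectively), z_3 ends up
   VARYING because the return value of an ordinary call cannot be known
   at compile time, and names whose values are never examined simply
   stay UNINITIALIZED.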
51 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
53 1- In ccp_visit_stmt, we are interested in assignments whose RHS
54 evaluates into a constant and conditional jumps whose predicate
55 evaluates into a boolean true or false. When an assignment of
56 the form V_i = CONST is found, V_i's lattice value is set to
57 CONSTANT and CONST is associated with it. This causes the
58 propagation engine to add all the SSA edges coming out of the
59 assignment into the worklists, so that statements that use V_i
60 can be visited.
62 If the statement is a conditional with a constant predicate, we
63 mark the outgoing edges as executable or not executable
64 depending on the predicate's value. This is then used when
65 visiting PHI nodes to know when a PHI argument can be ignored.
68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69 same constant C, then the LHS of the PHI is set to C. This
70 evaluation is known as the "meet operation". Since one of the
71 goals of this evaluation is to optimistically return constant
72 values as often as possible, it uses two main short cuts:
74 - If an argument is flowing in through a non-executable edge, it
75 is ignored. This is useful in cases like this:
77 if (PRED)
78 a_9 = 3;
79 else
80 a_10 = 100;
81 a_11 = PHI (a_9, a_10)
83 If PRED is known to always evaluate to false, then we can
84 assume that a_11 will always take its value from a_10, meaning
85 that instead of considering it VARYING (a_9 and a_10 have
86 different values), we can consider it CONSTANT 100.
88 - If an argument has an UNDEFINED value, then it does not affect
89 the outcome of the meet operation. If a variable V_i has an
90 UNDEFINED value, it means that either its defining statement
91 hasn't been visited yet or V_i has no defining statement, in
92 which case the original symbol 'V' is being used
93 uninitialized. Since 'V' is a local variable, the compiler
94 may assume any initial value for it.
97 After propagation, every variable V_i that ends up with a lattice
98 value of CONSTANT will have the associated constant value in the
99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
100 final substitution and folding.
103 Constant propagation in stores and loads (STORE-CCP)
104 ----------------------------------------------------
106 While CCP has all the logic to propagate constants in GIMPLE
107 registers, it is missing the ability to associate constants with
108 stores and loads (i.e., pointer dereferences, structures and
109 global/aliased variables). We don't keep loads and stores in
110 SSA, but we do build a factored use-def web for them (in the
111 virtual operands).
113 For instance, consider the following code fragment:
115 struct A a;
116 const int B = 42;
118 void foo (int i)
120 if (i > 10)
121 a.a = 42;
122 else
124 a.b = 21;
125 a.a = a.b + 21;
128 if (a.a != B)
129 never_executed ();
132 We should be able to deduce that the predicate 'a.a != B' is always
133 false. To achieve this, we associate constant values to the SSA
134 names in the VDEF operands for each store. Additionally,
135 since we also glob partial loads/stores with the base symbol, we
136 also keep track of the memory reference where the constant value
137 was stored (in the MEM_REF field of PROP_VALUE_T). For instance,
139 # a_5 = VDEF <a_4>
140 a.a = 2;
142 # VUSE <a_5>
143 x_3 = a.b;
145 In the example above, CCP will associate value '2' with 'a_5', but
146 it would be wrong to replace the load from 'a.b' with '2', because
147 '2' had been stored into a.a.
149 Note that the initial value of virtual operands is VARYING, not
150 UNDEFINED. Consider, for instance global variables:
152 int A;
154 foo (int i)
156 if (i_3 > 10)
157 A_4 = 3;
158 # A_5 = PHI (A_4, A_2);
160 # VUSE <A_5>
161 A.0_6 = A;
163 return A.0_6;
166 The value of A_2 cannot be assumed to be UNDEFINED, as it may have
167 been defined outside of foo. If we were to assume it UNDEFINED, we
168 would erroneously optimize the above into 'return 3;'.
170 Though STORE-CCP is not too expensive, it does have to do more work
171 than regular CCP, so it is only enabled at -O2. Both regular CCP
172 and STORE-CCP use the exact same algorithm. The only distinction
173 is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
174 set to true. This affects the evaluation of statements and PHI
175 nodes.
177 References:
179 Constant propagation with conditional branches,
180 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
182 Building an Optimizing Compiler,
183 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
185 Advanced Compiler Design and Implementation,
186 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
188 #include "config.h"
189 #include "system.h"
190 #include "coretypes.h"
191 #include "tm.h"
192 #include "tree.h"
193 #include "flags.h"
194 #include "rtl.h"
195 #include "tm_p.h"
196 #include "ggc.h"
197 #include "basic-block.h"
198 #include "output.h"
199 #include "expr.h"
200 #include "function.h"
201 #include "diagnostic.h"
202 #include "timevar.h"
203 #include "tree-dump.h"
204 #include "tree-flow.h"
205 #include "tree-pass.h"
206 #include "tree-ssa-propagate.h"
207 #include "value-prof.h"
208 #include "langhooks.h"
209 #include "target.h"
210 #include "toplev.h"
213 /* Possible lattice values. */
214 typedef enum
216 UNINITIALIZED,
217 UNDEFINED,
218 CONSTANT,
219 VARYING
220 } ccp_lattice_t;
222 /* Array of propagated constant values. After propagation,
223 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
224 the constant is held in an SSA name representing a memory store
225 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
226 memory reference used to store (i.e., the LHS of the assignment
227 doing the store). */
228 static prop_value_t *const_val;
230 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
232 static void
233 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
235 switch (val.lattice_val)
237 case UNINITIALIZED:
238 fprintf (outf, "%sUNINITIALIZED", prefix);
239 break;
240 case UNDEFINED:
241 fprintf (outf, "%sUNDEFINED", prefix);
242 break;
243 case VARYING:
244 fprintf (outf, "%sVARYING", prefix);
245 break;
246 case CONSTANT:
247 fprintf (outf, "%sCONSTANT ", prefix);
248 print_generic_expr (outf, val.value, dump_flags);
249 break;
250 default:
251 gcc_unreachable ();
256 /* Print lattice value VAL to stderr. */
258 void debug_lattice_value (prop_value_t val);
260 void
261 debug_lattice_value (prop_value_t val)
263 dump_lattice_value (stderr, "", val);
264 fprintf (stderr, "\n");
269 /* If SYM is a constant variable with known value, return the value.
270 NULL_TREE is returned otherwise. */
272 tree
273 get_symbol_constant_value (tree sym)
275 if (TREE_STATIC (sym)
276 && TREE_READONLY (sym)
277 && !MTAG_P (sym))
279 tree val = DECL_INITIAL (sym);
280 if (val)
282 STRIP_USELESS_TYPE_CONVERSION (val);
283 if (is_gimple_min_invariant (val))
284 return val;
286 /* Variables declared 'const' without an initializer
287 have zero as the initializer if they may not be
288 overridden at link or run time. */
289 if (!val
290 && !DECL_EXTERNAL (sym)
291 && targetm.binds_local_p (sym)
292 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
293 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
294 return fold_convert (TREE_TYPE (sym), integer_zero_node);
297 return NULL_TREE;
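/* A hypothetical example of the above (names are illustrative): for

	static const int limit = 16;

   the symbol is TREE_STATIC and TREE_READONLY and carries a
   DECL_INITIAL, so get_symbol_constant_value returns the INTEGER_CST
   16.  A readonly, locally bound integral or float symbol without an
   initializer yields zero, and everything else yields NULL_TREE.  */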
300 /* Compute a default value for variable VAR and store it in the
301 CONST_VAL array. The following rules are used to get default
302 values:
304 1- Global and static variables that are declared constant are
305 considered CONSTANT.
307 2- Any other value is considered UNDEFINED. This is useful when
308 considering PHI nodes. PHI arguments that are undefined do not
309 change the constant value of the PHI node, which allows for more
310 constants to be propagated.
312 3- Variables defined by statements other than assignments and PHI
313 nodes are considered VARYING.
315 4- Initial values of variables that are not GIMPLE registers are
316 considered VARYING. */
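/* For instance (a hypothetical fragment), in

	int i;
	if (p_2)
	  i_3 = 5;
	# i_4 = PHI <i_3, i_1(D)>

   the default definition i_1(D) of the uninitialized local 'i' starts
   out UNDEFINED, so the meet over the PHI arguments can still produce
   CONSTANT 5.  Had 'i' been a parameter, i_1(D) would start out
   VARYING instead.  */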
318 static prop_value_t
319 get_default_value (tree var)
321 tree sym = SSA_NAME_VAR (var);
322 prop_value_t val = { UNINITIALIZED, NULL_TREE };
323 tree cst_val;
325 if (!is_gimple_reg (var))
327 /* Short circuit for regular CCP. We are not interested in any
328 non-register when DO_STORE_CCP is false. */
329 val.lattice_val = VARYING;
331 else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
333 /* Globals and static variables declared 'const' take their
334 initial value. */
335 val.lattice_val = CONSTANT;
336 val.value = cst_val;
338 else
340 gimple stmt = SSA_NAME_DEF_STMT (var);
342 if (gimple_nop_p (stmt))
344 /* Variables defined by an empty statement are those used
345 before being initialized. If VAR is a local variable, we
346 can assume initially that it is UNDEFINED, otherwise we must
347 consider it VARYING. */
348 if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
349 val.lattice_val = UNDEFINED;
350 else
351 val.lattice_val = VARYING;
353 else if (is_gimple_assign (stmt)
354 /* Value-returning GIMPLE_CALL statements assign to
355 a variable, and are treated similarly to GIMPLE_ASSIGN. */
356 || (is_gimple_call (stmt)
357 && gimple_call_lhs (stmt) != NULL_TREE)
358 || gimple_code (stmt) == GIMPLE_PHI)
360 /* Any other variable defined by an assignment or a PHI node
361 is considered UNDEFINED. */
362 val.lattice_val = UNDEFINED;
364 else
366 /* Otherwise, VAR will never take on a constant value. */
367 val.lattice_val = VARYING;
371 return val;
375 /* Get the constant value associated with variable VAR. */
377 static inline prop_value_t *
378 get_value (tree var)
380 prop_value_t *val;
382 if (const_val == NULL)
383 return NULL;
385 val = &const_val[SSA_NAME_VERSION (var)];
386 if (val->lattice_val == UNINITIALIZED)
387 *val = get_default_value (var);
389 return val;
392 /* Sets the value associated with VAR to VARYING. */
394 static inline void
395 set_value_varying (tree var)
397 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
399 val->lattice_val = VARYING;
400 val->value = NULL_TREE;
403 /* For float types, modify the value of VAL to make ccp work correctly
404 for non-standard values (-0, NaN):
406 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
407 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
408 This is to fix the following problem (see PR 29921): Suppose we have
410 x = 0.0 * y
412 and we set value of y to NaN. This causes value of x to be set to NaN.
413 When we later determine that y is in fact VARYING, fold uses the fact
414 that HONOR_NANS is false, and we try to change the value of x to 0,
415 causing an ICE. With HONOR_NANS being false, the real appearance of
416 NaN would cause undefined behavior, though, so claiming that y (and x)
417 are UNDEFINED initially is correct. */
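/* For instance (values are illustrative), if some x_2 carries the
   REAL_CST -0.0 and the target mode does not honor signed zeros, the
   value is canonicalized to 0.0; likewise a NaN constant is dropped
   back to UNDEFINED, rather than propagated, when the mode does not
   honor NaNs.  */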
419 static void
420 canonicalize_float_value (prop_value_t *val)
422 enum machine_mode mode;
423 tree type;
424 REAL_VALUE_TYPE d;
426 if (val->lattice_val != CONSTANT
427 || TREE_CODE (val->value) != REAL_CST)
428 return;
430 d = TREE_REAL_CST (val->value);
431 type = TREE_TYPE (val->value);
432 mode = TYPE_MODE (type);
434 if (!HONOR_SIGNED_ZEROS (mode)
435 && REAL_VALUE_MINUS_ZERO (d))
437 val->value = build_real (type, dconst0);
438 return;
441 if (!HONOR_NANS (mode)
442 && REAL_VALUE_ISNAN (d))
444 val->lattice_val = UNDEFINED;
445 val->value = NULL;
446 return;
450 /* Set the value for variable VAR to NEW_VAL. Return true if the new
451 value is different from VAR's previous value. */
453 static bool
454 set_lattice_value (tree var, prop_value_t new_val)
456 prop_value_t *old_val = get_value (var);
458 canonicalize_float_value (&new_val);
460 /* Lattice transitions must always be monotonically increasing in
461 value. If *OLD_VAL and NEW_VAL are the same, return false to
462 inform the caller that this was a non-transition. */
464 gcc_assert (old_val->lattice_val < new_val.lattice_val
465 || (old_val->lattice_val == new_val.lattice_val
466 && ((!old_val->value && !new_val.value)
467 || operand_equal_p (old_val->value, new_val.value, 0))));
469 if (old_val->lattice_val != new_val.lattice_val)
471 if (dump_file && (dump_flags & TDF_DETAILS))
473 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
474 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
477 *old_val = new_val;
479 gcc_assert (new_val.lattice_val != UNDEFINED);
480 return true;
483 return false;
487 /* Return the likely CCP lattice value for STMT.
489 If STMT has no operands, then return CONSTANT.
491 Else if undefinedness of operands of STMT causes its value to be
492 undefined, then return UNDEFINED.
494 Else if any operands of STMT are constants, then return CONSTANT.
496 Else return VARYING. */
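/* Some illustrative cases (hypothetical SSA names): for
   'z_4 = x_2 + y_3' with x_2 known CONSTANT, likely_value returns
   CONSTANT even if y_3 is VARYING; for 'w_5 = y_3 * y_6' with neither
   constant nor undefined operands it returns VARYING; and for
   'u_7 = v_8 + 1' with v_8 still UNDEFINED it returns UNDEFINED.  */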
498 static ccp_lattice_t
499 likely_value (gimple stmt)
501 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
502 tree use;
503 ssa_op_iter iter;
505 enum gimple_code code = gimple_code (stmt);
507 /* This function appears to be called only for assignments, calls,
508 conditionals, and switches, due to the logic in visit_stmt. */
509 gcc_assert (code == GIMPLE_ASSIGN
510 || code == GIMPLE_CALL
511 || code == GIMPLE_COND
512 || code == GIMPLE_SWITCH);
514 /* If the statement has volatile operands, it won't fold to a
515 constant value. */
516 if (gimple_has_volatile_ops (stmt))
517 return VARYING;
519 /* If we are not doing store-ccp, statements with loads
520 and/or stores will never fold into a constant. */
521 if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
522 return VARYING;
524 /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
525 is_gimple_min_invariant, so we do not consider calls or
526 other forms of assignment. */
527 if (gimple_assign_single_p (stmt)
528 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
529 return CONSTANT;
531 if (code == GIMPLE_COND
532 && is_gimple_min_invariant (gimple_cond_lhs (stmt))
533 && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
534 return CONSTANT;
536 if (code == GIMPLE_SWITCH
537 && is_gimple_min_invariant (gimple_switch_index (stmt)))
538 return CONSTANT;
540 /* Arrive here for more complex cases. */
542 has_constant_operand = false;
543 has_undefined_operand = false;
544 all_undefined_operands = true;
545 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
547 prop_value_t *val = get_value (use);
549 if (val->lattice_val == UNDEFINED)
550 has_undefined_operand = true;
551 else
552 all_undefined_operands = false;
554 if (val->lattice_val == CONSTANT)
555 has_constant_operand = true;
558 /* If the operation combines operands like COMPLEX_EXPR make sure to
559 not mark the result UNDEFINED if only one part of the result is
560 undefined. */
561 if (has_undefined_operand && all_undefined_operands)
562 return UNDEFINED;
563 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
565 switch (gimple_assign_rhs_code (stmt))
567 /* Unary operators are handled with all_undefined_operands. */
568 case PLUS_EXPR:
569 case MINUS_EXPR:
570 case POINTER_PLUS_EXPR:
571 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
572 Not bitwise operators, one VARYING operand may specify the
573 result completely. Not logical operators for the same reason.
574 Not COMPLEX_EXPR as one VARYING operand makes the result partly
575 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
576 the undefined operand may be promoted. */
577 return UNDEFINED;
579 default:
583 /* If there was an UNDEFINED operand but the result need not be UNDEFINED,
584 fall back to VARYING even if there were CONSTANT operands. */
585 if (has_undefined_operand)
586 return VARYING;
588 if (has_constant_operand
589 /* We do not consider virtual operands here -- load from read-only
590 memory may have only VARYING virtual operands, but still be
591 constant. */
592 || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
593 return CONSTANT;
595 return VARYING;
598 /* Returns true if STMT cannot be constant. */
600 static bool
601 surely_varying_stmt_p (gimple stmt)
603 /* If the statement has operands that we cannot handle, it cannot be
604 constant. */
605 if (gimple_has_volatile_ops (stmt))
606 return true;
608 if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
609 return true;
611 /* If it is a call and does not return a value or is not a
612 builtin and not an indirect call, it is varying. */
613 if (is_gimple_call (stmt))
615 tree fndecl;
616 if (!gimple_call_lhs (stmt)
617 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
618 && !DECL_BUILT_IN (fndecl)))
619 return true;
622 /* Anything other than assignments and conditional jumps are not
623 interesting for CCP. */
624 if (gimple_code (stmt) != GIMPLE_ASSIGN
625 && gimple_code (stmt) != GIMPLE_COND
626 && gimple_code (stmt) != GIMPLE_SWITCH
627 && gimple_code (stmt) != GIMPLE_CALL)
628 return true;
630 return false;
633 /* Initialize local data structures for CCP. */
635 static void
636 ccp_initialize (void)
638 basic_block bb;
640 const_val = XCNEWVEC (prop_value_t, num_ssa_names);
642 /* Initialize simulation flags for PHI nodes and statements. */
643 FOR_EACH_BB (bb)
645 gimple_stmt_iterator i;
647 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
649 gimple stmt = gsi_stmt (i);
650 bool is_varying = surely_varying_stmt_p (stmt);
652 if (is_varying)
654 tree def;
655 ssa_op_iter iter;
657 /* If the statement will not produce a constant, mark
658 all its outputs VARYING. */
659 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
661 if (is_varying)
662 set_value_varying (def);
665 prop_set_simulate_again (stmt, !is_varying);
669 /* Now process PHI nodes. We never clear the simulate_again flag on
670 phi nodes, since we do not know which edges are executable yet,
671 except for phi nodes for virtual operands when we do not do store ccp. */
672 FOR_EACH_BB (bb)
674 gimple_stmt_iterator i;
676 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
678 gimple phi = gsi_stmt (i);
680 if (!is_gimple_reg (gimple_phi_result (phi)))
681 prop_set_simulate_again (phi, false);
682 else
683 prop_set_simulate_again (phi, true);
689 /* Do final substitution of propagated values, cleanup the flowgraph and
690 free allocated storage.
692 Return TRUE when something was optimized. */
694 static bool
695 ccp_finalize (void)
697 /* Perform substitutions based on the known constant values. */
698 bool something_changed = substitute_and_fold (const_val, false);
700 free (const_val);
701 const_val = NULL;
702 return something_changed;
706 /* Compute the meet operator between *VAL1 and *VAL2. Store the result
707 in VAL1.
709 any M UNDEFINED = any
710 any M VARYING = VARYING
711 Ci M Cj = Ci if (i == j)
712 Ci M Cj = VARYING if (i != j)  */
715 static void
716 ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
718 if (val1->lattice_val == UNDEFINED)
720 /* UNDEFINED M any = any */
721 *val1 = *val2;
723 else if (val2->lattice_val == UNDEFINED)
725 /* any M UNDEFINED = any
726 Nothing to do. VAL1 already contains the value we want. */
729 else if (val1->lattice_val == VARYING
730 || val2->lattice_val == VARYING)
732 /* any M VARYING = VARYING. */
733 val1->lattice_val = VARYING;
734 val1->value = NULL_TREE;
736 else if (val1->lattice_val == CONSTANT
737 && val2->lattice_val == CONSTANT
738 && simple_cst_equal (val1->value, val2->value) == 1)
740 /* Ci M Cj = Ci if (i == j)
741 Ci M Cj = VARYING if (i != j)
743 If these two values come from memory stores, make sure that
744 they come from the same memory reference. */
745 val1->lattice_val = CONSTANT;
746 val1->value = val1->value;
748 else
750 /* Any other combination is VARYING. */
751 val1->lattice_val = VARYING;
752 val1->value = NULL_TREE;
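/* A worked example of the rules above (values are illustrative):

	CONSTANT 3 M UNDEFINED    = CONSTANT 3
	CONSTANT 3 M CONSTANT 3   = CONSTANT 3
	CONSTANT 3 M CONSTANT 100 = VARYING
	anything   M VARYING      = VARYING  */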
757 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
758 lattice values to determine PHI_NODE's lattice value. The value of a
759 PHI node is determined calling ccp_lattice_meet with all the arguments
760 of the PHI node that are incoming via executable edges. */
762 static enum ssa_prop_result
763 ccp_visit_phi_node (gimple phi)
765 unsigned i;
766 prop_value_t *old_val, new_val;
768 if (dump_file && (dump_flags & TDF_DETAILS))
770 fprintf (dump_file, "\nVisiting PHI node: ");
771 print_gimple_stmt (dump_file, phi, 0, dump_flags);
774 old_val = get_value (gimple_phi_result (phi));
775 switch (old_val->lattice_val)
777 case VARYING:
778 return SSA_PROP_VARYING;
780 case CONSTANT:
781 new_val = *old_val;
782 break;
784 case UNDEFINED:
785 new_val.lattice_val = UNDEFINED;
786 new_val.value = NULL_TREE;
787 break;
789 default:
790 gcc_unreachable ();
793 for (i = 0; i < gimple_phi_num_args (phi); i++)
795 /* Compute the meet operator over all the PHI arguments flowing
796 through executable edges. */
797 edge e = gimple_phi_arg_edge (phi, i);
799 if (dump_file && (dump_flags & TDF_DETAILS))
801 fprintf (dump_file,
802 "\n Argument #%d (%d -> %d %sexecutable)\n",
803 i, e->src->index, e->dest->index,
804 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
807 /* If the incoming edge is executable, compute the meet operator for
808 the existing value of the PHI node and the current PHI argument. */
809 if (e->flags & EDGE_EXECUTABLE)
811 tree arg = gimple_phi_arg (phi, i)->def;
812 prop_value_t arg_val;
814 if (is_gimple_min_invariant (arg))
816 arg_val.lattice_val = CONSTANT;
817 arg_val.value = arg;
819 else
820 arg_val = *(get_value (arg));
822 ccp_lattice_meet (&new_val, &arg_val);
824 if (dump_file && (dump_flags & TDF_DETAILS))
826 fprintf (dump_file, "\t");
827 print_generic_expr (dump_file, arg, dump_flags);
828 dump_lattice_value (dump_file, "\tValue: ", arg_val);
829 fprintf (dump_file, "\n");
832 if (new_val.lattice_val == VARYING)
833 break;
837 if (dump_file && (dump_flags & TDF_DETAILS))
839 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
840 fprintf (dump_file, "\n\n");
843 /* Make the transition to the new value. */
844 if (set_lattice_value (gimple_phi_result (phi), new_val))
846 if (new_val.lattice_val == VARYING)
847 return SSA_PROP_VARYING;
848 else
849 return SSA_PROP_INTERESTING;
851 else
852 return SSA_PROP_NOT_INTERESTING;
855 /* Return true if we may propagate the address expression ADDR into the
856 dereference DEREF and cancel them. */
858 bool
859 may_propagate_address_into_dereference (tree addr, tree deref)
861 gcc_assert (INDIRECT_REF_P (deref)
862 && TREE_CODE (addr) == ADDR_EXPR);
864 /* Don't propagate if ADDR's operand has incomplete type. */
865 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
866 return false;
868 /* If the address is invariant then we do not need to preserve restrict
869 qualifications. But we do need to preserve volatile qualifiers until
870 we can annotate the folded dereference itself properly. */
871 if (is_gimple_min_invariant (addr)
872 && (!TREE_THIS_VOLATILE (deref)
873 || TYPE_VOLATILE (TREE_TYPE (addr))))
874 return useless_type_conversion_p (TREE_TYPE (deref),
875 TREE_TYPE (TREE_OPERAND (addr, 0)));
877 /* Else both the address substitution and the folding must result in
878 a valid useless type conversion sequence. */
879 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
880 TREE_TYPE (addr))
881 && useless_type_conversion_p (TREE_TYPE (deref),
882 TREE_TYPE (TREE_OPERAND (addr, 0))));
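/* For instance (hypothetical names), if CCP knows p_1 = &a and later
   sees the dereference *p_1, the pair may be cancelled into a direct
   reference to 'a', provided 'a' has a complete type and the types
   agree in the useless_type_conversion_p sense; a volatile dereference
   is only folded when the volatile qualifier is preserved.  */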
885 /* CCP specific front-end to the non-destructive constant folding
886 routines.
888 Attempt to simplify the RHS of STMT knowing that one or more
889 operands are constants.
891 If simplification is possible, return the simplified RHS,
892 otherwise return the original RHS or NULL_TREE. */
894 static tree
895 ccp_fold (gimple stmt)
897 switch (gimple_code (stmt))
899 case GIMPLE_ASSIGN:
901 enum tree_code subcode = gimple_assign_rhs_code (stmt);
903 switch (get_gimple_rhs_class (subcode))
905 case GIMPLE_SINGLE_RHS:
907 tree rhs = gimple_assign_rhs1 (stmt);
908 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
910 if (TREE_CODE (rhs) == SSA_NAME)
912 /* If the RHS is an SSA_NAME, return its known constant value,
913 if any. */
914 return get_value (rhs)->value;
916 /* Handle propagating invariant addresses into address operations.
917 The folding we do here matches that in tree-ssa-forwprop.c. */
918 else if (TREE_CODE (rhs) == ADDR_EXPR)
920 tree *base;
921 base = &TREE_OPERAND (rhs, 0);
922 while (handled_component_p (*base))
923 base = &TREE_OPERAND (*base, 0);
924 if (TREE_CODE (*base) == INDIRECT_REF
925 && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
927 prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
928 if (val->lattice_val == CONSTANT
929 && TREE_CODE (val->value) == ADDR_EXPR
930 && may_propagate_address_into_dereference
931 (val->value, *base))
933 /* We need to return a new tree, not modify the IL
934 or share parts of it. So play some tricks to
935 avoid manually building it. */
936 tree ret, save = *base;
937 *base = TREE_OPERAND (val->value, 0);
938 ret = unshare_expr (rhs);
939 recompute_tree_invariant_for_addr_expr (ret);
940 *base = save;
941 return ret;
946 if (kind == tcc_reference)
948 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR
949 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
951 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
952 if (val->lattice_val == CONSTANT)
953 return fold_unary (VIEW_CONVERT_EXPR,
954 TREE_TYPE (rhs), val->value);
956 return fold_const_aggregate_ref (rhs);
958 else if (kind == tcc_declaration)
959 return get_symbol_constant_value (rhs);
960 return rhs;
963 case GIMPLE_UNARY_RHS:
965 /* Handle unary operators that can appear in GIMPLE form.
966 Note that we know the single operand must be a constant,
967 so this should almost always return a simplified RHS. */
968 tree lhs = gimple_assign_lhs (stmt);
969 tree op0 = gimple_assign_rhs1 (stmt);
971 /* Simplify the operand down to a constant. */
972 if (TREE_CODE (op0) == SSA_NAME)
974 prop_value_t *val = get_value (op0);
975 if (val->lattice_val == CONSTANT)
976 op0 = get_value (op0)->value;
979 /* Conversions are useless for CCP purposes if they are
980 value-preserving. Thus the restrictions that
981 useless_type_conversion_p places for pointer type conversions
982 do not apply here. Substitution later will only substitute to
983 allowed places. */
984 if (CONVERT_EXPR_CODE_P (subcode)
985 && POINTER_TYPE_P (TREE_TYPE (lhs))
986 && POINTER_TYPE_P (TREE_TYPE (op0))
987 /* Do not allow differences in volatile qualification
988 as this might get us confused as to whether a
989 propagation destination statement is volatile
990 or not. See PR36988. */
991 && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
992 == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
994 tree tem;
995 /* Still try to generate a constant of correct type. */
996 if (!useless_type_conversion_p (TREE_TYPE (lhs),
997 TREE_TYPE (op0))
998 && ((tem = maybe_fold_offset_to_address
999 (op0, integer_zero_node, TREE_TYPE (lhs)))
1000 != NULL_TREE))
1001 return tem;
1002 return op0;
1005 return fold_unary_ignore_overflow (subcode,
1006 gimple_expr_type (stmt), op0);
1009 case GIMPLE_BINARY_RHS:
1011 /* Handle binary operators that can appear in GIMPLE form. */
1012 tree op0 = gimple_assign_rhs1 (stmt);
1013 tree op1 = gimple_assign_rhs2 (stmt);
1015 /* Simplify the operands down to constants when appropriate. */
1016 if (TREE_CODE (op0) == SSA_NAME)
1018 prop_value_t *val = get_value (op0);
1019 if (val->lattice_val == CONSTANT)
1020 op0 = val->value;
1023 if (TREE_CODE (op1) == SSA_NAME)
1025 prop_value_t *val = get_value (op1);
1026 if (val->lattice_val == CONSTANT)
1027 op1 = val->value;
1030 /* Fold &foo + CST into an invariant reference if possible. */
1031 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1032 && TREE_CODE (op0) == ADDR_EXPR
1033 && TREE_CODE (op1) == INTEGER_CST)
1035 tree lhs = gimple_assign_lhs (stmt);
1036 tree tem = maybe_fold_offset_to_address (op0, op1,
1037 TREE_TYPE (lhs));
1038 if (tem != NULL_TREE)
1039 return tem;
1042 return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
1045 default:
1046 gcc_unreachable ();
1049 break;
1051 case GIMPLE_CALL:
1053 tree fn = gimple_call_fn (stmt);
1054 prop_value_t *val;
1056 if (TREE_CODE (fn) == SSA_NAME)
1058 val = get_value (fn);
1059 if (val->lattice_val == CONSTANT)
1060 fn = val->value;
1062 if (TREE_CODE (fn) == ADDR_EXPR
1063 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1064 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
1066 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
1067 tree call, retval;
1068 unsigned i;
1069 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1071 args[i] = gimple_call_arg (stmt, i);
1072 if (TREE_CODE (args[i]) == SSA_NAME)
1074 val = get_value (args[i]);
1075 if (val->lattice_val == CONSTANT)
1076 args[i] = val->value;
1079 call = build_call_array (gimple_call_return_type (stmt),
1080 fn, gimple_call_num_args (stmt), args);
1081 retval = fold_call_expr (call, false);
1082 if (retval)
1083 /* fold_call_expr wraps the result inside a NOP_EXPR. */
1084 STRIP_NOPS (retval);
1085 return retval;
1087 return NULL_TREE;
1090 case GIMPLE_COND:
1092 /* Handle comparison operators that can appear in GIMPLE form. */
1093 tree op0 = gimple_cond_lhs (stmt);
1094 tree op1 = gimple_cond_rhs (stmt);
1095 enum tree_code code = gimple_cond_code (stmt);
1097 /* Simplify the operands down to constants when appropriate. */
1098 if (TREE_CODE (op0) == SSA_NAME)
1100 prop_value_t *val = get_value (op0);
1101 if (val->lattice_val == CONSTANT)
1102 op0 = val->value;
1105 if (TREE_CODE (op1) == SSA_NAME)
1107 prop_value_t *val = get_value (op1);
1108 if (val->lattice_val == CONSTANT)
1109 op1 = val->value;
1112 return fold_binary (code, boolean_type_node, op0, op1);
1115 case GIMPLE_SWITCH:
1117 tree rhs = gimple_switch_index (stmt);
1119 if (TREE_CODE (rhs) == SSA_NAME)
1121 /* If the RHS is an SSA_NAME, return its known constant value,
1122 if any. */
1123 return get_value (rhs)->value;
1126 return rhs;
1129 default:
1130 gcc_unreachable ();
1135 /* Return the tree representing the element referenced by T if T is an
1136 ARRAY_REF or COMPONENT_REF into constant aggregates. Return
1137 NULL_TREE otherwise. */
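/* For example (illustrative declarations): given

	static const int tbl[3] = { 10, 20, 30 };

   a read of tbl[1] folds to the INTEGER_CST 20, and an in-range read
   from a STRING_CST such as "abc"[1] folds to the character constant
   'b'.  Indices that are SSA names are first resolved through the
   constant lattice.  */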
1139 tree
1140 fold_const_aggregate_ref (tree t)
1142 prop_value_t *value;
1143 tree base, ctor, idx, field;
1144 unsigned HOST_WIDE_INT cnt;
1145 tree cfield, cval;
1147 switch (TREE_CODE (t))
1149 case ARRAY_REF:
1150 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1151 DECL_INITIAL. If BASE is a nested reference into another
1152 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1153 the inner reference. */
1154 base = TREE_OPERAND (t, 0);
1155 switch (TREE_CODE (base))
1157 case VAR_DECL:
1158 if (!TREE_READONLY (base)
1159 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
1160 || !targetm.binds_local_p (base))
1161 return NULL_TREE;
1163 ctor = DECL_INITIAL (base);
1164 break;
1166 case ARRAY_REF:
1167 case COMPONENT_REF:
1168 ctor = fold_const_aggregate_ref (base);
1169 break;
1171 case STRING_CST:
1172 case CONSTRUCTOR:
1173 ctor = base;
1174 break;
1176 default:
1177 return NULL_TREE;
1180 if (ctor == NULL_TREE
1181 || (TREE_CODE (ctor) != CONSTRUCTOR
1182 && TREE_CODE (ctor) != STRING_CST)
1183 || !TREE_STATIC (ctor))
1184 return NULL_TREE;
1186 /* Get the index. If we have an SSA_NAME, try to resolve it
1187 with the current lattice value for the SSA_NAME. */
1188 idx = TREE_OPERAND (t, 1);
1189 switch (TREE_CODE (idx))
1191 case SSA_NAME:
1192 if ((value = get_value (idx))
1193 && value->lattice_val == CONSTANT
1194 && TREE_CODE (value->value) == INTEGER_CST)
1195 idx = value->value;
1196 else
1197 return NULL_TREE;
1198 break;
1200 case INTEGER_CST:
1201 break;
1203 default:
1204 return NULL_TREE;
1207 /* Fold read from constant string. */
1208 if (TREE_CODE (ctor) == STRING_CST)
1210 if ((TYPE_MODE (TREE_TYPE (t))
1211 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1212 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1213 == MODE_INT)
1214 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
1215 && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
1216 return build_int_cst_type (TREE_TYPE (t),
1217 (TREE_STRING_POINTER (ctor)
1218 [TREE_INT_CST_LOW (idx)]));
1219 return NULL_TREE;
1222 /* Whoo-hoo! I'll fold ya baby. Yeah! */
1223 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1224 if (tree_int_cst_equal (cfield, idx))
1226 STRIP_USELESS_TYPE_CONVERSION (cval);
1227 return cval;
1229 break;
1231 case COMPONENT_REF:
1232 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1233 DECL_INITIAL. If BASE is a nested reference into another
1234 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1235 the inner reference. */
1236 base = TREE_OPERAND (t, 0);
1237 switch (TREE_CODE (base))
1239 case VAR_DECL:
1240 if (!TREE_READONLY (base)
1241 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1242 || !targetm.binds_local_p (base))
1243 return NULL_TREE;
1245 ctor = DECL_INITIAL (base);
1246 break;
1248 case ARRAY_REF:
1249 case COMPONENT_REF:
1250 ctor = fold_const_aggregate_ref (base);
1251 break;
1253 default:
1254 return NULL_TREE;
1257 if (ctor == NULL_TREE
1258 || TREE_CODE (ctor) != CONSTRUCTOR
1259 || !TREE_STATIC (ctor))
1260 return NULL_TREE;
1262 field = TREE_OPERAND (t, 1);
1264 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1265 if (cfield == field
1266 /* FIXME: Handle bit-fields. */
1267 && ! DECL_BIT_FIELD (cfield))
1269 STRIP_USELESS_TYPE_CONVERSION (cval);
1270 return cval;
1272 break;
1274 case REALPART_EXPR:
1275 case IMAGPART_EXPR:
1277 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
1278 if (c && TREE_CODE (c) == COMPLEX_CST)
1279 return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
1280 break;
1283 case INDIRECT_REF:
1285 tree base = TREE_OPERAND (t, 0);
1286 if (TREE_CODE (base) == SSA_NAME
1287 && (value = get_value (base))
1288 && value->lattice_val == CONSTANT
1289 && TREE_CODE (value->value) == ADDR_EXPR)
1290 return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
1291 break;
1294 default:
1295 break;
1298 return NULL_TREE;
1301 /* Evaluate statement STMT.
1302 Valid only for assignments, calls, conditionals, and switches. */
1304 static prop_value_t
1305 evaluate_stmt (gimple stmt)
1307 prop_value_t val;
1308 tree simplified = NULL_TREE;
1309 ccp_lattice_t likelyvalue = likely_value (stmt);
1310 bool is_constant;
1312 fold_defer_overflow_warnings ();
1314 /* If the statement is likely to have a CONSTANT result, then try
1315 to fold the statement to determine the constant value. */
1316 /* FIXME. This is the only place that we call ccp_fold.
1317 Since likely_value never returns CONSTANT for calls, we will
1318 not attempt to fold them, including builtins that may profit. */
1319 if (likelyvalue == CONSTANT)
1320 simplified = ccp_fold (stmt);
1321 /* If the statement is likely to have a VARYING result, then do not
1322 bother folding the statement. */
1323 else if (likelyvalue == VARYING)
1325 enum gimple_code code = gimple_code (stmt);
1326 if (code == GIMPLE_ASSIGN)
1328 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1330 /* Other cases cannot satisfy is_gimple_min_invariant
1331 without folding. */
1332 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1333 simplified = gimple_assign_rhs1 (stmt);
1335 else if (code == GIMPLE_SWITCH)
1336 simplified = gimple_switch_index (stmt);
1337 else
1338 /* These cannot satisfy is_gimple_min_invariant without folding. */
1339 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1342 is_constant = simplified && is_gimple_min_invariant (simplified);
1344 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1346 if (dump_file && (dump_flags & TDF_DETAILS))
1348 fprintf (dump_file, "which is likely ");
1349 switch (likelyvalue)
1351 case CONSTANT:
1352 fprintf (dump_file, "CONSTANT");
1353 break;
1354 case UNDEFINED:
1355 fprintf (dump_file, "UNDEFINED");
1356 break;
1357 case VARYING:
1358 fprintf (dump_file, "VARYING");
1359 break;
1360 default:;
1362 fprintf (dump_file, "\n");
1365 if (is_constant)
1367 /* The statement produced a constant value. */
1368 val.lattice_val = CONSTANT;
1369 val.value = simplified;
1371 else
1373 /* The statement produced a nonconstant value. If the statement
1374 had UNDEFINED operands, then the result of the statement
1375 should be UNDEFINED. Otherwise, the statement is VARYING. */
1376 if (likelyvalue == UNDEFINED)
1377 val.lattice_val = likelyvalue;
1378 else
1379 val.lattice_val = VARYING;
1381 val.value = NULL_TREE;
1384 return val;
1387 /* Visit the assignment statement STMT. Set the value of its LHS to the
1388 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1389 creates virtual definitions, set the value of each new name to that
1390 of the RHS (if we can derive a constant out of the RHS).
1391 Value-returning call statements also perform an assignment, and
1392 are handled here. */
1394 static enum ssa_prop_result
1395 visit_assignment (gimple stmt, tree *output_p)
1397 prop_value_t val;
1398 enum ssa_prop_result retval;
1400 tree lhs = gimple_get_lhs (stmt);
1402 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1403 || gimple_call_lhs (stmt) != NULL_TREE);
1405 if (gimple_assign_copy_p (stmt))
1407 tree rhs = gimple_assign_rhs1 (stmt);
1409 if (TREE_CODE (rhs) == SSA_NAME)
1411 /* For a simple copy operation, we copy the lattice values. */
1412 prop_value_t *nval = get_value (rhs);
1413 val = *nval;
1415 else
1416 val = evaluate_stmt (stmt);
1418 else
1419 /* Evaluate the statement, which could be
1420 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1421 val = evaluate_stmt (stmt);
1423 retval = SSA_PROP_NOT_INTERESTING;
1425 /* Set the lattice value of the statement's output. */
1426 if (TREE_CODE (lhs) == SSA_NAME)
1428 /* If STMT is an assignment to an SSA_NAME, we only have one
1429 value to set. */
1430 if (set_lattice_value (lhs, val))
1432 *output_p = lhs;
1433 if (val.lattice_val == VARYING)
1434 retval = SSA_PROP_VARYING;
1435 else
1436 retval = SSA_PROP_INTERESTING;
1440 return retval;
1444 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1445 if it can determine which edge will be taken. Otherwise, return
1446 SSA_PROP_VARYING. */
1448 static enum ssa_prop_result
1449 visit_cond_stmt (gimple stmt, edge *taken_edge_p)
1451 prop_value_t val;
1452 basic_block block;
1454 block = gimple_bb (stmt);
1455 val = evaluate_stmt (stmt);
1457 /* Find which edge out of the conditional block will be taken and add it
1458 to the worklist. If no single edge can be determined statically,
1459 return SSA_PROP_VARYING to feed all the outgoing edges to the
1460 propagation engine. */
1461 *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
1462 if (*taken_edge_p)
1463 return SSA_PROP_INTERESTING;
1464 else
1465 return SSA_PROP_VARYING;
1469 /* Evaluate statement STMT. If the statement produces an output value and
1470 its evaluation changes the lattice value of its output, return
1471 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1472 output value.
1474 If STMT is a conditional branch and we can determine its truth
1475 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1476 value, return SSA_PROP_VARYING. */
1478 static enum ssa_prop_result
1479 ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
1481 tree def;
1482 ssa_op_iter iter;
1484 if (dump_file && (dump_flags & TDF_DETAILS))
1486 fprintf (dump_file, "\nVisiting statement:\n");
1487 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1490 switch (gimple_code (stmt))
1492 case GIMPLE_ASSIGN:
1493 /* If the statement is an assignment that produces a single
1494 output value, evaluate its RHS to see if the lattice value of
1495 its output has changed. */
1496 return visit_assignment (stmt, output_p);
1498 case GIMPLE_CALL:
1499 /* A value-returning call also performs an assignment. */
1500 if (gimple_call_lhs (stmt) != NULL_TREE)
1501 return visit_assignment (stmt, output_p);
1502 break;
1504 case GIMPLE_COND:
1505 case GIMPLE_SWITCH:
1506 /* If STMT is a conditional branch, see if we can determine
1507 which branch will be taken. */
1508 /* FIXME. It appears that we should be able to optimize
1509 computed GOTOs here as well. */
1510 return visit_cond_stmt (stmt, taken_edge_p);
1512 default:
1513 break;
1516 /* Any other kind of statement is not interesting for constant
1517 propagation and, therefore, not worth simulating. */
1518 if (dump_file && (dump_flags & TDF_DETAILS))
1519 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
1521 /* Definitions made by statements other than assignments to
1522 SSA_NAMEs represent unknown modifications to their outputs.
1523 Mark them VARYING. */
1524 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
1526 prop_value_t v = { VARYING, NULL_TREE };
1527 set_lattice_value (def, v);
1530 return SSA_PROP_VARYING;
1534 /* Main entry point for SSA Conditional Constant Propagation. */
1536 static unsigned int
1537 do_ssa_ccp (void)
1539 ccp_initialize ();
1540 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
1541 if (ccp_finalize ())
1542 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
1543 else
1544 return 0;
1548 static bool
1549 gate_ccp (void)
1551 return flag_tree_ccp != 0;
1555 struct gimple_opt_pass pass_ccp =
1558 GIMPLE_PASS,
1559 "ccp", /* name */
1560 gate_ccp, /* gate */
1561 do_ssa_ccp, /* execute */
1562 NULL, /* sub */
1563 NULL, /* next */
1564 0, /* static_pass_number */
1565 TV_TREE_CCP, /* tv_id */
1566 PROP_cfg | PROP_ssa, /* properties_required */
1567 0, /* properties_provided */
1568 0, /* properties_destroyed */
1569 0, /* todo_flags_start */
1570 TODO_dump_func | TODO_verify_ssa
1571 | TODO_verify_stmts | TODO_ggc_collect /* todo_flags_finish */
1576 /* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
1577 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
1578 is the desired result type. */
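/* For example (illustrative, assuming a 4-byte int): folding the load
   *(int *)((char *)&a + 12) where 'a' is 'int a[8]' passes BASE = a,
   OFFSET = 12 and ORIG_TYPE = int; 12 is an exact multiple of the
   element size, so the reference becomes a[3].  */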
1580 static tree
1581 maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
1582 bool allow_negative_idx)
1584 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
1585 tree array_type, elt_type, elt_size;
1586 tree domain_type;
1588 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
1589 measured in units of the size of the element type) from that ARRAY_REF.
1590 We can't do anything if either is variable.
1592 The case we handle here is *(&A[N]+O). */
1593 if (TREE_CODE (base) == ARRAY_REF)
1595 tree low_bound = array_ref_low_bound (base);
1597 elt_offset = TREE_OPERAND (base, 1);
1598 if (TREE_CODE (low_bound) != INTEGER_CST
1599 || TREE_CODE (elt_offset) != INTEGER_CST)
1600 return NULL_TREE;
1602 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
1603 base = TREE_OPERAND (base, 0);
1606 /* Ignore stupid user tricks of indexing non-array variables. */
1607 array_type = TREE_TYPE (base);
1608 if (TREE_CODE (array_type) != ARRAY_TYPE)
1609 return NULL_TREE;
1610 elt_type = TREE_TYPE (array_type);
1611 if (!useless_type_conversion_p (orig_type, elt_type))
1612 return NULL_TREE;
1614 /* Use signed size type for intermediate computation on the index. */
1615 idx_type = signed_type_for (size_type_node);
1617 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1618 element type (so we can use the alignment if it's not constant).
1619 Otherwise, compute the offset as an index by using a division. If the
1620 division isn't exact, then don't do anything. */
1621 elt_size = TYPE_SIZE_UNIT (elt_type);
1622 if (!elt_size)
1623 return NULL;
1624 if (integer_zerop (offset))
1626 if (TREE_CODE (elt_size) != INTEGER_CST)
1627 elt_size = size_int (TYPE_ALIGN (elt_type));
1629 idx = build_int_cst (idx_type, 0);
1631 else
1633 unsigned HOST_WIDE_INT lquo, lrem;
1634 HOST_WIDE_INT hquo, hrem;
1635 double_int soffset;
1637 /* The final array offset should be signed, so we need
1638 to sign-extend the (possibly pointer) offset here
1639 and use signed division. */
1640 soffset = double_int_sext (tree_to_double_int (offset),
1641 TYPE_PRECISION (TREE_TYPE (offset)));
1642 if (TREE_CODE (elt_size) != INTEGER_CST
1643 || div_and_round_double (TRUNC_DIV_EXPR, 0,
1644 soffset.low, soffset.high,
1645 TREE_INT_CST_LOW (elt_size),
1646 TREE_INT_CST_HIGH (elt_size),
1647 &lquo, &hquo, &lrem, &hrem)
1648 || lrem || hrem)
1649 return NULL_TREE;
1651 idx = build_int_cst_wide (idx_type, lquo, hquo);
1654 /* Assume the low bound is zero. If there is a domain type, get the
1655 low bound, if any, convert the index into that type, and add the
1656 low bound. */
1657 min_idx = build_int_cst (idx_type, 0);
1658 domain_type = TYPE_DOMAIN (array_type);
1659 if (domain_type)
1661 idx_type = domain_type;
1662 if (TYPE_MIN_VALUE (idx_type))
1663 min_idx = TYPE_MIN_VALUE (idx_type);
1664 else
1665 min_idx = fold_convert (idx_type, min_idx);
1667 if (TREE_CODE (min_idx) != INTEGER_CST)
1668 return NULL_TREE;
1670 elt_offset = fold_convert (idx_type, elt_offset);
1673 if (!integer_zerop (min_idx))
1674 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
1675 if (!integer_zerop (elt_offset))
1676 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
1678 /* Make sure to possibly truncate late after offsetting. */
1679 idx = fold_convert (idx_type, idx);
1681 /* We don't want to construct access past array bounds. For example
1682 char *(c[4]);
1683 c[3][2];
1684 should not be simplified into (*c)[14] or tree-vrp will
1685 give false warnings. The same is true for
1686 struct A { long x; char d[0]; } *a;
1687 (char *)a - 4;
1688 which should not be folded to &a->d[-8]. */
1689 if (domain_type
1690 && TYPE_MAX_VALUE (domain_type)
1691 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
1693 tree up_bound = TYPE_MAX_VALUE (domain_type);
1695 if (tree_int_cst_lt (up_bound, idx)
1696 /* Accesses after the end of arrays of size 0 (gcc
1697 extension) and 1 are likely intentional ("struct
1698 hack"). */
1699 && compare_tree_int (up_bound, 1) > 0)
1700 return NULL_TREE;
1702 if (domain_type
1703 && TYPE_MIN_VALUE (domain_type))
1705 if (!allow_negative_idx
1706 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
1707 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
1708 return NULL_TREE;
1710 else if (!allow_negative_idx
1711 && compare_tree_int (idx, 0) < 0)
1712 return NULL_TREE;
1714 return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
1718 /* Attempt to fold *(S+O) to S.X.
1719 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
1720 is the desired result type. */
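/* For example (illustrative, assuming a 4-byte int and no padding):
   for

	struct S { int x; int y; } s;

   the access *(int *)((char *)&s + 4) matches field 'y' exactly, so it
   folds to s.y; an offset that lands inside an aggregate field makes
   us recurse into that field instead.  */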
1722 static tree
1723 maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
1724 tree orig_type, bool base_is_ptr)
1726 tree f, t, field_type, tail_array_field, field_offset;
1727 tree ret;
1728 tree new_base;
1730 if (TREE_CODE (record_type) != RECORD_TYPE
1731 && TREE_CODE (record_type) != UNION_TYPE
1732 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1733 return NULL_TREE;
1735 /* Short-circuit silly cases. */
1736 if (useless_type_conversion_p (record_type, orig_type))
1737 return NULL_TREE;
1739 tail_array_field = NULL_TREE;
1740 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1742 int cmp;
1744 if (TREE_CODE (f) != FIELD_DECL)
1745 continue;
1746 if (DECL_BIT_FIELD (f))
1747 continue;
1749 if (!DECL_FIELD_OFFSET (f))
1750 continue;
1751 field_offset = byte_position (f);
1752 if (TREE_CODE (field_offset) != INTEGER_CST)
1753 continue;
1755 /* ??? Java creates "interesting" fields for representing base classes.
1756 They have no name, and have no context. With no context, we get into
1757 trouble with nonoverlapping_component_refs_p. Skip them. */
1758 if (!DECL_FIELD_CONTEXT (f))
1759 continue;
1761 /* The previous array field isn't at the end. */
1762 tail_array_field = NULL_TREE;
1764 /* Check to see if this offset overlaps with the field. */
1765 cmp = tree_int_cst_compare (field_offset, offset);
1766 if (cmp > 0)
1767 continue;
1769 field_type = TREE_TYPE (f);
1771 /* Here we exactly match the offset being checked. If the types match,
1772 then we can return that field. */
1773 if (cmp == 0
1774 && useless_type_conversion_p (orig_type, field_type))
1776 if (base_is_ptr)
1777 base = build1 (INDIRECT_REF, record_type, base);
1778 t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1779 return t;
1782 /* Don't care about offsets into the middle of scalars. */
1783 if (!AGGREGATE_TYPE_P (field_type))
1784 continue;
1786 /* Check for array at the end of the struct. This is often
1787 used for flexible array members. We should be able to
1788 turn this into an array access anyway. */
1789 if (TREE_CODE (field_type) == ARRAY_TYPE)
1790 tail_array_field = f;
1792 /* Check the end of the field against the offset. */
1793 if (!DECL_SIZE_UNIT (f)
1794 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
1795 continue;
1796 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
1797 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
1798 continue;
1800 /* If we matched, then set offset to the displacement into
1801 this field. */
1802 if (base_is_ptr)
1803 new_base = build1 (INDIRECT_REF, record_type, base);
1804 else
1805 new_base = base;
1806 new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);
1808 /* Recurse to possibly find the match. */
1809 ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
1810 f == TYPE_FIELDS (record_type));
1811 if (ret)
1812 return ret;
1813 ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
1814 orig_type, false);
1815 if (ret)
1816 return ret;
1819 if (!tail_array_field)
1820 return NULL_TREE;
1822 f = tail_array_field;
1823 field_type = TREE_TYPE (f);
1824 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
1826 /* If we get here, we've got an aggregate field, and a possibly
1827 nonzero offset into them. Recurse and hope for a valid match. */
1828 if (base_is_ptr)
1829 base = build1 (INDIRECT_REF, record_type, base);
1830 base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1832 t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
1833 f == TYPE_FIELDS (record_type));
1834 if (t)
1835 return t;
1836 return maybe_fold_offset_to_component_ref (field_type, base, offset,
1837 orig_type, false);
1840 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
1841 or BASE[index] or by combination of those.
1843 Before attempting the conversion strip off existing ADDR_EXPRs and
1844 handled component refs. */
1846 tree
1847 maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
1849 tree ret;
1850 tree type;
1851 bool base_is_ptr = true;
1853 STRIP_NOPS (base);
1854 if (TREE_CODE (base) == ADDR_EXPR)
1856 base_is_ptr = false;
1858 base = TREE_OPERAND (base, 0);
1860 /* Handle the case where an existing COMPONENT_REF picks, e.g., the wrong
1861 field of a union, so it needs to be removed and a new COMPONENT_REF
1862 constructed.  Such wrong COMPONENT_REFs are often constructed by folding
1863 the (type *)&object within the expression (type *)&object+offset. */
1864 if (handled_component_p (base))
1866 HOST_WIDE_INT sub_offset, size, maxsize;
1867 tree newbase;
1868 newbase = get_ref_base_and_extent (base, &sub_offset,
1869 &size, &maxsize);
1870 gcc_assert (newbase);
1871 if (size == maxsize
1872 && size != -1
1873 && !(sub_offset & (BITS_PER_UNIT - 1)))
1875 base = newbase;
1876 if (sub_offset)
1877 offset = int_const_binop (PLUS_EXPR, offset,
1878 build_int_cst (TREE_TYPE (offset),
1879 sub_offset / BITS_PER_UNIT), 1);
1882 if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
1883 && integer_zerop (offset))
1884 return base;
1885 type = TREE_TYPE (base);
1887 else
1889 base_is_ptr = true;
1890 if (!POINTER_TYPE_P (TREE_TYPE (base)))
1891 return NULL_TREE;
1892 type = TREE_TYPE (TREE_TYPE (base));
1894 ret = maybe_fold_offset_to_component_ref (type, base, offset,
1895 orig_type, base_is_ptr);
1896 if (!ret)
1898 if (base_is_ptr)
1899 base = build1 (INDIRECT_REF, type, base);
1900 ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
1902 return ret;
1905 /* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
1906 or &BASE[index] or by combination of those.
1908 Before attempting the conversion strip off existing component refs. */
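/* For example (illustrative, assuming a 4-byte int): given 'int a[4]',
   an address '&a' carrying a byte offset of 8 (a POINTER_PLUS_EXPR)
   can be rewritten as &a[2], unless doing so would move the address
   from one array member of a structure to another; that case must be
   rejected so that __builtin_object_size keeps working, as explained
   in the function body below.  */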
1910 tree
1911 maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
1913 tree t;
1915 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
1916 && POINTER_TYPE_P (orig_type));
1918 t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
1919 if (t != NULL_TREE)
1921 tree orig = addr;
1922 tree ptr_type;
1924 /* For __builtin_object_size to function correctly we need to
1925 make sure not to fold address arithmetic so that we change
1926 reference from one array to another. This would happen for
1927 example for
1929 struct X { char s1[10]; char s2[10] } s;
1930 char *foo (void) { return &s.s2[-4]; }
1932 where we need to avoid generating &s.s1[6]. As the C and
1933 C++ frontends create different initial trees
1934 (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
1935 sophisticated comparisons here. Note that checking for the
1936 condition after the fact is easier than trying to avoid doing
1937 the folding. */
1938 STRIP_NOPS (orig);
1939 if (TREE_CODE (orig) == ADDR_EXPR)
1940 orig = TREE_OPERAND (orig, 0);
1941 if ((TREE_CODE (orig) == ARRAY_REF
1942 || (TREE_CODE (orig) == COMPONENT_REF
1943 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
1944 && (TREE_CODE (t) == ARRAY_REF
1945 || TREE_CODE (t) == COMPONENT_REF)
1946 && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
1947 ? TREE_OPERAND (orig, 0) : orig,
1948 TREE_CODE (t) == ARRAY_REF
1949 ? TREE_OPERAND (t, 0) : t, 0))
1950 return NULL_TREE;
1952 ptr_type = build_pointer_type (TREE_TYPE (t));
1953 if (!useless_type_conversion_p (orig_type, ptr_type))
1954 return NULL_TREE;
1955 return build_fold_addr_expr_with_type (t, ptr_type);
1958 return NULL_TREE;
1961 /* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET).
1962 Return the simplified expression, or NULL if nothing could be done. */
1964 static tree
1965 maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
1967 tree t;
1968 bool volatile_p = TREE_THIS_VOLATILE (expr);
1970 /* We may well have constructed a double-nested PLUS_EXPR via multiple
1971 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
1972 are sometimes added. */
1973 base = fold (base);
1974 STRIP_TYPE_NOPS (base);
1975 TREE_OPERAND (expr, 0) = base;
1977 /* One possibility is that the address reduces to a string constant. */
1978 t = fold_read_from_constant_string (expr);
1979 if (t)
1980 return t;
1982 /* Add in any offset from a POINTER_PLUS_EXPR. */
1983 if (TREE_CODE (base) == POINTER_PLUS_EXPR)
1985 tree offset2;
1987 offset2 = TREE_OPERAND (base, 1);
1988 if (TREE_CODE (offset2) != INTEGER_CST)
1989 return NULL_TREE;
1990 base = TREE_OPERAND (base, 0);
1992 offset = fold_convert (sizetype,
1993 int_const_binop (PLUS_EXPR, offset, offset2, 1));
1996 if (TREE_CODE (base) == ADDR_EXPR)
1998 tree base_addr = base;
2000 /* Strip the ADDR_EXPR. */
2001 base = TREE_OPERAND (base, 0);
2003 /* Fold away CONST_DECL to its value, if the type is scalar. */
2004 if (TREE_CODE (base) == CONST_DECL
2005 && is_gimple_min_invariant (DECL_INITIAL (base)))
2006 return DECL_INITIAL (base);
2008 /* Try folding *(&B+O) to B.X. */
2009 t = maybe_fold_offset_to_reference (base_addr, offset,
2010 TREE_TYPE (expr));
2011 if (t)
2013 /* Preserve volatileness of the original expression.
2014 We can end up with a plain decl here which is shared
2015 and we shouldn't mess with its flags. */
2016 if (!SSA_VAR_P (t))
2017 TREE_THIS_VOLATILE (t) = volatile_p;
2018 return t;
2021 else
2023 /* We can get here for out-of-range string constant accesses,
2024 such as "_"[3]. Bail out of the entire substitution search
2025 and arrange for the entire statement to be replaced by a
2026 call to __builtin_trap. In all likelihood this will all be
2027 constant-folded away, but in the meantime we can't leave with
2028 something that get_expr_operands can't understand. */
2030 t = base;
2031 STRIP_NOPS (t);
2032 if (TREE_CODE (t) == ADDR_EXPR
2033 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
2035 /* FIXME: Except that this causes problems elsewhere with dead
2036 code not being deleted, and we die in the rtl expanders
2037 because we failed to remove some ssa_name. In the meantime,
2038 just return zero. */
2039 /* FIXME2: This condition should be signaled by
2040 fold_read_from_constant_string directly, rather than
2041 re-checking for it here. */
2042 return integer_zero_node;
2045 /* Try folding *(B+O) to B->X. Still an improvement. */
2046 if (POINTER_TYPE_P (TREE_TYPE (base)))
2048 t = maybe_fold_offset_to_reference (base, offset,
2049 TREE_TYPE (expr));
2050 if (t)
2051 return t;
2055 /* Otherwise we had an offset that we could not simplify. */
2056 return NULL_TREE;
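/* Hypothetical examples of the folds attempted above (sketch only,
   assuming a 4-byte int):

     char c = *("abc" + 1);              -> the address reduces to a string
                                            constant, so the load folds
                                            to 'b'.

     struct S { int a; int b; } s;
     x = *(int *) ((char *) &s + 4);     -> *(&B+O) folds to x = s.b.

   An out-of-range access such as *("_" + 3) has no valid folded form; as
   the FIXMEs above note, it currently yields integer_zero_node rather
   than a __builtin_trap call.  */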
2060 /* A quaint feature extant in our address arithmetic is that there
2061 can be hidden type changes here. The type of the result need
2062 not be the same as the type of the input pointer.
2064 What we're after here is an expression of the form
2065 (T *)(&array + const)
2066 where array is OP0, const is OP1, RES_TYPE is T and
2067 the cast doesn't actually exist, but is implicit in the
2068 type of the POINTER_PLUS_EXPR. We'd like to turn this into
2069 &array[x]
2070 which may be able to propagate further. */
2072 tree
2073 maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
2075 tree ptd_type;
2076 tree t;
2078 /* It had better be a constant. */
2079 if (TREE_CODE (op1) != INTEGER_CST)
2080 return NULL_TREE;
2081 /* The first operand should be an ADDR_EXPR. */
2082 if (TREE_CODE (op0) != ADDR_EXPR)
2083 return NULL_TREE;
2084 op0 = TREE_OPERAND (op0, 0);
2086 /* If the first operand is an ARRAY_REF, expand it so that we can fold
2087 the offset into it. */
2088 while (TREE_CODE (op0) == ARRAY_REF)
2090 tree array_obj = TREE_OPERAND (op0, 0);
2091 tree array_idx = TREE_OPERAND (op0, 1);
2092 tree elt_type = TREE_TYPE (op0);
2093 tree elt_size = TYPE_SIZE_UNIT (elt_type);
2094 tree min_idx;
2096 if (TREE_CODE (array_idx) != INTEGER_CST)
2097 break;
2098 if (TREE_CODE (elt_size) != INTEGER_CST)
2099 break;
2101 /* Un-bias the index by the min index of the array type. */
2102 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
2103 if (min_idx)
2105 min_idx = TYPE_MIN_VALUE (min_idx);
2106 if (min_idx)
2108 if (TREE_CODE (min_idx) != INTEGER_CST)
2109 break;
2111 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
2112 if (!integer_zerop (min_idx))
2113 array_idx = int_const_binop (MINUS_EXPR, array_idx,
2114 min_idx, 0);
2118 /* Convert the index to a byte offset. */
2119 array_idx = fold_convert (sizetype, array_idx);
2120 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
2122 /* Update the operands for the next round, or for folding. */
2123 op1 = int_const_binop (PLUS_EXPR,
2124 array_idx, op1, 0);
2125 op0 = array_obj;
2128 ptd_type = TREE_TYPE (res_type);
2129 /* If we want a pointer to void, reconstruct the reference from the
2130 array element type. A pointer to that can be trivially converted
2131 to void *. This happens as we fold (void *)(ptr p+ off). */
2132 if (VOID_TYPE_P (ptd_type)
2133 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
2134 ptd_type = TREE_TYPE (TREE_TYPE (op0));
2136 /* At which point we can try some of the same things as for indirects. */
2137 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
2138 if (!t)
2139 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
2140 ptd_type, false);
2141 if (t)
2142 t = build1 (ADDR_EXPR, res_type, t);
2144 return t;
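/* A worked example of the arithmetic above (hypothetical, assuming a
   4-byte int and a zero-based array): for

     int a[10];
     p_2 = &a[1] p+ 8;

   the ARRAY_REF inside the ADDR_EXPR is expanded first, contributing
   1 * 4 bytes to the offset, so op0 becomes a and op1 becomes 12, and
   maybe_fold_offset_to_array_ref rebuilds the result as &a[3].  */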
2147 /* For passing state through walk_tree into fold_stmt_r and its
2148 children. */
2150 struct fold_stmt_r_data
2152 gimple stmt;
2153 bool *changed_p;
2154 bool *inside_addr_expr_p;
2157 /* Subroutine of fold_stmt called via walk_tree. We perform several
2158 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
2160 static tree
2161 fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
2163 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2164 struct fold_stmt_r_data *fold_stmt_r_data;
2165 bool *inside_addr_expr_p;
2166 bool *changed_p;
2167 tree expr = *expr_p, t;
2168 bool volatile_p = TREE_THIS_VOLATILE (expr);
2170 fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
2171 inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
2172 changed_p = fold_stmt_r_data->changed_p;
2174 /* ??? It'd be nice if walk_tree had a pre-order option. */
2175 switch (TREE_CODE (expr))
2177 case INDIRECT_REF:
2178 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2179 if (t)
2180 return t;
2181 *walk_subtrees = 0;
2183 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
2184 integer_zero_node);
2185 /* Avoid folding *"abc" = 5 into 'a' = 5. */
2186 if (wi->is_lhs && t && TREE_CODE (t) == INTEGER_CST)
2187 t = NULL_TREE;
2188 if (!t
2189 && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2190 /* If we had a good reason for propagating the address here,
2191 make sure we end up with valid gimple. See PR34989. */
2192 t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
2193 break;
2195 case NOP_EXPR:
2196 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2197 if (t)
2198 return t;
2199 *walk_subtrees = 0;
2201 if (POINTER_TYPE_P (TREE_TYPE (expr))
2202 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
2203 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2204 && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
2205 integer_zero_node,
2206 TREE_TYPE (TREE_TYPE (expr)))))
2207 return t;
2208 break;
2210 /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
2211 We'd only want to bother decomposing an existing ARRAY_REF if
2212 the base array is found to have another offset contained within.
2213 Otherwise we'd be wasting time. */
2214 case ARRAY_REF:
2215 /* If we are not processing expressions found within an
2216 ADDR_EXPR, then we can fold constant array references.
2217 Don't fold on LHS either, to avoid folding "abc"[0] = 5
2218 into 'a' = 5. */
2219 if (!*inside_addr_expr_p && !wi->is_lhs)
2220 t = fold_read_from_constant_string (expr);
2221 else
2222 t = NULL;
2223 break;
2225 case ADDR_EXPR:
2226 *inside_addr_expr_p = true;
2227 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2228 *inside_addr_expr_p = false;
2229 if (t)
2230 return t;
2231 *walk_subtrees = 0;
2233 /* Make sure the value is properly considered constant, and so gets
2234 propagated as expected. */
2235 if (*changed_p)
2236 recompute_tree_invariant_for_addr_expr (expr);
2237 return NULL_TREE;
2239 case COMPONENT_REF:
2240 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2241 if (t)
2242 return t;
2243 *walk_subtrees = 0;
2245 /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
2246 We've already checked that the records are compatible, so we should
2247 come up with a set of compatible fields. */
2249 tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
2250 tree expr_field = TREE_OPERAND (expr, 1);
2252 if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
2254 expr_field = find_compatible_field (expr_record, expr_field);
2255 TREE_OPERAND (expr, 1) = expr_field;
2258 break;
2260 case TARGET_MEM_REF:
2261 t = maybe_fold_tmr (expr);
2262 break;
2264 case POINTER_PLUS_EXPR:
2265 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2266 if (t)
2267 return t;
2268 t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
2269 if (t)
2270 return t;
2271 *walk_subtrees = 0;
2273 t = maybe_fold_stmt_addition (TREE_TYPE (expr),
2274 TREE_OPERAND (expr, 0),
2275 TREE_OPERAND (expr, 1));
2276 break;
2278 case COND_EXPR:
2279 if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
2281 tree op0 = TREE_OPERAND (expr, 0);
2282 tree tem;
2283 bool set;
2285 fold_defer_overflow_warnings ();
2286 tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
2287 TREE_OPERAND (op0, 0),
2288 TREE_OPERAND (op0, 1));
2289 /* This is actually a conditional expression, not a GIMPLE
2290 conditional statement; however, the valid_gimple_rhs_p
2291 test still applies. */
2292 set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
2293 fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
2294 if (set)
2296 COND_EXPR_COND (expr) = tem;
2297 t = expr;
2298 break;
2301 return NULL_TREE;
2303 default:
2304 return NULL_TREE;
2307 if (t)
2309 /* Preserve volatileness of the original expression.
2310 We can end up with a plain decl here which is shared
2311 and we shouldn't mess with its flags. */
2312 if (!SSA_VAR_P (t))
2313 TREE_THIS_VOLATILE (t) = volatile_p;
2314 *expr_p = t;
2315 *changed_p = true;
2318 return NULL_TREE;
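/* For instance (sketch): after CCP propagates an address into a
   dereference, the walk above may see

     *&b

   as an INDIRECT_REF of an ADDR_EXPR; maybe_fold_stmt_indirect reduces it
   to b, and even when no fold applies the ADDR_EXPR operand is stripped
   so the statement remains valid GIMPLE (the PR34989 case).  */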
2321 /* Return the string length, maximum string length or maximum value of
2322 ARG in LENGTH.
2323 If ARG is an SSA name variable, follow its use-def chains. Return
2324 false if we are unable to determine the length or value, or if,
2325 for TYPE == 0, *LENGTH is already set and differs from the length
2326 we determine. VISITED is a bitmap of visited variables.
2327 TYPE is 0 if string length should be returned, 1 for maximum string
2328 length and 2 for maximum value ARG can have. */
2330 static bool
2331 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
2333 tree var, val;
2334 gimple def_stmt;
2336 if (TREE_CODE (arg) != SSA_NAME)
2338 if (TREE_CODE (arg) == COND_EXPR)
2339 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
2340 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
2341 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
2342 else if (TREE_CODE (arg) == ADDR_EXPR
2343 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
2344 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
2346 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
2347 if (TREE_CODE (aop0) == INDIRECT_REF
2348 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
2349 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
2350 length, visited, type);
2353 if (type == 2)
2355 val = arg;
2356 if (TREE_CODE (val) != INTEGER_CST
2357 || tree_int_cst_sgn (val) < 0)
2358 return false;
2360 else
2361 val = c_strlen (arg, 1);
2362 if (!val)
2363 return false;
2365 if (*length)
2367 if (type > 0)
2369 if (TREE_CODE (*length) != INTEGER_CST
2370 || TREE_CODE (val) != INTEGER_CST)
2371 return false;
2373 if (tree_int_cst_lt (*length, val))
2374 *length = val;
2375 return true;
2377 else if (simple_cst_equal (val, *length) != 1)
2378 return false;
2381 *length = val;
2382 return true;
2385 /* If we were already here, break the infinite cycle. */
2386 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2387 return true;
2388 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2390 var = arg;
2391 def_stmt = SSA_NAME_DEF_STMT (var);
2393 switch (gimple_code (def_stmt))
2395 case GIMPLE_ASSIGN:
2396 /* The RHS of the statement defining VAR must either have a
2397 constant length or come from another SSA_NAME with a constant
2398 length. */
2399 if (gimple_assign_single_p (def_stmt)
2400 || gimple_assign_unary_nop_p (def_stmt))
2402 tree rhs = gimple_assign_rhs1 (def_stmt);
2403 return get_maxval_strlen (rhs, length, visited, type);
2405 return false;
2407 case GIMPLE_PHI:
2409 /* All the arguments of the PHI node must have the same constant
2410 length. */
2411 unsigned i;
2413 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
2415 tree arg = gimple_phi_arg (def_stmt, i)->def;
2417 /* If this PHI has itself as an argument, we cannot
2418 determine the string length of this argument. However,
2419 if we can find a constant string length for the other
2420 PHI args then we can still be sure that this is a
2421 constant string length. So be optimistic and just
2422 continue with the next argument. */
2423 if (arg == gimple_phi_result (def_stmt))
2424 continue;
2426 if (!get_maxval_strlen (arg, length, visited, type))
2427 return false;
2430 return true;
2432 default:
2433 return false;
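/* Hypothetical illustration of the TYPE parameter: for

     p_1 = PHI <&"ab"[0], &"wxyz"[0]>

   a TYPE == 0 query (exact string length) fails because the two
   arguments give different lengths, while a TYPE == 1 query (maximum
   string length) walks both PHI arguments and returns 4.  */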
2438 /* Fold builtin call in statement STMT. Returns a simplified tree.
2439 We may return a non-constant expression, including another call
2440 to a different function and with different arguments, e.g.,
2441 substituting memcpy for strcpy when the string length is known.
2442 Note that some builtins expand into inline code that may not
2443 be valid in GIMPLE. Callers must take care. */
2445 static tree
2446 ccp_fold_builtin (gimple stmt)
2448 tree result, val[3];
2449 tree callee, a;
2450 int arg_idx, type;
2451 bitmap visited;
2452 bool ignore;
2453 int nargs;
2455 gcc_assert (is_gimple_call (stmt));
2457 ignore = (gimple_call_lhs (stmt) == NULL);
2459 /* First try the generic builtin folder. If that succeeds, return the
2460 result directly. */
2461 result = fold_call_stmt (stmt, ignore);
2462 if (result)
2464 if (ignore)
2465 STRIP_NOPS (result);
2466 return result;
2469 /* Ignore MD builtins. */
2470 callee = gimple_call_fndecl (stmt);
2471 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2472 return NULL_TREE;
2474 /* If the builtin could not be folded, and it has no argument list,
2475 we're done. */
2476 nargs = gimple_call_num_args (stmt);
2477 if (nargs == 0)
2478 return NULL_TREE;
2480 /* Limit the work only for builtins we know how to simplify. */
2481 switch (DECL_FUNCTION_CODE (callee))
2483 case BUILT_IN_STRLEN:
2484 case BUILT_IN_FPUTS:
2485 case BUILT_IN_FPUTS_UNLOCKED:
2486 arg_idx = 0;
2487 type = 0;
2488 break;
2489 case BUILT_IN_STRCPY:
2490 case BUILT_IN_STRNCPY:
2491 arg_idx = 1;
2492 type = 0;
2493 break;
2494 case BUILT_IN_MEMCPY_CHK:
2495 case BUILT_IN_MEMPCPY_CHK:
2496 case BUILT_IN_MEMMOVE_CHK:
2497 case BUILT_IN_MEMSET_CHK:
2498 case BUILT_IN_STRNCPY_CHK:
2499 arg_idx = 2;
2500 type = 2;
2501 break;
2502 case BUILT_IN_STRCPY_CHK:
2503 case BUILT_IN_STPCPY_CHK:
2504 arg_idx = 1;
2505 type = 1;
2506 break;
2507 case BUILT_IN_SNPRINTF_CHK:
2508 case BUILT_IN_VSNPRINTF_CHK:
2509 arg_idx = 1;
2510 type = 2;
2511 break;
2512 default:
2513 return NULL_TREE;
2516 if (arg_idx >= nargs)
2517 return NULL_TREE;
2519 /* Try to use the dataflow information gathered by the CCP process. */
2520 visited = BITMAP_ALLOC (NULL);
2521 bitmap_clear (visited);
2523 memset (val, 0, sizeof (val));
2524 a = gimple_call_arg (stmt, arg_idx);
2525 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
2526 val[arg_idx] = NULL_TREE;
2528 BITMAP_FREE (visited);
2530 result = NULL_TREE;
2531 switch (DECL_FUNCTION_CODE (callee))
2533 case BUILT_IN_STRLEN:
2534 if (val[0] && nargs == 1)
2536 tree new_val =
2537 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
2539 /* If the result is not a valid gimple value, or not a cast
2540 of a valid gimple value, then we cannot use the result. */
2541 if (is_gimple_val (new_val)
2542 || (is_gimple_cast (new_val)
2543 && is_gimple_val (TREE_OPERAND (new_val, 0))))
2544 return new_val;
2546 break;
2548 case BUILT_IN_STRCPY:
2549 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
2550 result = fold_builtin_strcpy (callee,
2551 gimple_call_arg (stmt, 0),
2552 gimple_call_arg (stmt, 1),
2553 val[1]);
2554 break;
2556 case BUILT_IN_STRNCPY:
2557 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2558 result = fold_builtin_strncpy (callee,
2559 gimple_call_arg (stmt, 0),
2560 gimple_call_arg (stmt, 1),
2561 gimple_call_arg (stmt, 2),
2562 val[1]);
2563 break;
2565 case BUILT_IN_FPUTS:
2566 if (nargs == 2)
2567 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2568 gimple_call_arg (stmt, 1),
2569 ignore, false, val[0]);
2570 break;
2572 case BUILT_IN_FPUTS_UNLOCKED:
2573 if (nargs == 2)
2574 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2575 gimple_call_arg (stmt, 1),
2576 ignore, true, val[0]);
2577 break;
2579 case BUILT_IN_MEMCPY_CHK:
2580 case BUILT_IN_MEMPCPY_CHK:
2581 case BUILT_IN_MEMMOVE_CHK:
2582 case BUILT_IN_MEMSET_CHK:
2583 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2584 result = fold_builtin_memory_chk (callee,
2585 gimple_call_arg (stmt, 0),
2586 gimple_call_arg (stmt, 1),
2587 gimple_call_arg (stmt, 2),
2588 gimple_call_arg (stmt, 3),
2589 val[2], ignore,
2590 DECL_FUNCTION_CODE (callee));
2591 break;
2593 case BUILT_IN_STRCPY_CHK:
2594 case BUILT_IN_STPCPY_CHK:
2595 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2596 result = fold_builtin_stxcpy_chk (callee,
2597 gimple_call_arg (stmt, 0),
2598 gimple_call_arg (stmt, 1),
2599 gimple_call_arg (stmt, 2),
2600 val[1], ignore,
2601 DECL_FUNCTION_CODE (callee));
2602 break;
2604 case BUILT_IN_STRNCPY_CHK:
2605 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2606 result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
2607 gimple_call_arg (stmt, 1),
2608 gimple_call_arg (stmt, 2),
2609 gimple_call_arg (stmt, 3),
2610 val[2]);
2611 break;
2613 case BUILT_IN_SNPRINTF_CHK:
2614 case BUILT_IN_VSNPRINTF_CHK:
2615 if (val[1] && is_gimple_val (val[1]))
2616 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
2617 DECL_FUNCTION_CODE (callee));
2618 break;
2620 default:
2621 gcc_unreachable ();
2624 if (result && ignore)
2625 result = fold_ignored_result (result);
2626 return result;
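/* Sketch of a typical substitution made through this path (hypothetical
   names): for

     strcpy (buf, src_2);

   where get_maxval_strlen proves that src_2 has string length 5,
   fold_builtin_strcpy can rewrite the call as

     memcpy (buf, src_2, 6);

   copying the terminating NUL explicitly.  */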
2629 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
2630 replacement rhs for the statement or NULL_TREE if no simplification
2631 could be made. It is assumed that the operands have been previously
2632 folded. */
2634 static tree
2635 fold_gimple_assign (gimple_stmt_iterator *si)
2637 gimple stmt = gsi_stmt (*si);
2638 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2640 tree result = NULL;
2642 switch (get_gimple_rhs_class (subcode))
2644 case GIMPLE_SINGLE_RHS:
2646 tree rhs = gimple_assign_rhs1 (stmt);
2648 /* Try to fold a conditional expression. */
2649 if (TREE_CODE (rhs) == COND_EXPR)
2651 tree temp = fold (COND_EXPR_COND (rhs));
2652 if (temp != COND_EXPR_COND (rhs))
2653 result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
2654 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
2657 /* If we couldn't fold the RHS, hand over to the generic
2658 fold routines. */
2659 if (result == NULL_TREE)
2660 result = fold (rhs);
2662 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
2663 that may have been added by fold, and "useless" type
2664 conversions that might now be apparent due to propagation. */
2665 STRIP_USELESS_TYPE_CONVERSION (result);
2667 if (result != rhs && valid_gimple_rhs_p (result))
2668 return result;
2669 else
2670 /* It is possible that fold_stmt_r simplified the RHS.
2671 Make sure that the subcode of this statement still
2672 reflects the principal operator of the rhs operand. */
2673 return rhs;
2675 break;
2677 case GIMPLE_UNARY_RHS:
2679 tree rhs = gimple_assign_rhs1 (stmt);
2681 result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
2682 if (result)
2684 /* If the operation was a conversion do _not_ mark a
2685 resulting constant with TREE_OVERFLOW if the original
2686 constant was not. These conversions have implementation
2687 defined behavior and retaining the TREE_OVERFLOW flag
2688 here would confuse later passes such as VRP. */
2689 if (CONVERT_EXPR_CODE_P (subcode)
2690 && TREE_CODE (result) == INTEGER_CST
2691 && TREE_CODE (rhs) == INTEGER_CST)
2692 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
2694 STRIP_USELESS_TYPE_CONVERSION (result);
2695 if (valid_gimple_rhs_p (result))
2696 return result;
2698 else if (CONVERT_EXPR_CODE_P (subcode)
2699 && POINTER_TYPE_P (gimple_expr_type (stmt))
2700 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
2702 tree type = gimple_expr_type (stmt);
2703 tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
2704 integer_zero_node, type);
2705 if (t)
2706 return t;
2709 break;
2711 case GIMPLE_BINARY_RHS:
2712 /* Try to fold pointer addition. */
2713 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2715 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2716 if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
2718 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
2719 if (!useless_type_conversion_p
2720 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
2721 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2723 result = maybe_fold_stmt_addition (type,
2724 gimple_assign_rhs1 (stmt),
2725 gimple_assign_rhs2 (stmt));
2728 if (!result)
2729 result = fold_binary (subcode,
2730 TREE_TYPE (gimple_assign_lhs (stmt)),
2731 gimple_assign_rhs1 (stmt),
2732 gimple_assign_rhs2 (stmt));
2734 if (result)
2736 STRIP_USELESS_TYPE_CONVERSION (result);
2737 if (valid_gimple_rhs_p (result))
2738 return result;
2740 /* Fold might have produced non-GIMPLE, so if we trust it blindly
2741 we lose canonicalization opportunities. Do not go again
2742 through fold here though, or the same non-GIMPLE will be
2743 produced. */
2744 if (commutative_tree_code (subcode)
2745 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
2746 gimple_assign_rhs2 (stmt), false))
2747 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
2748 gimple_assign_rhs2 (stmt),
2749 gimple_assign_rhs1 (stmt));
2751 break;
2753 case GIMPLE_INVALID_RHS:
2754 gcc_unreachable ();
2757 return NULL_TREE;
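/* Illustration of the GIMPLE_BINARY_RHS case (sketch, assuming a 4-byte
   int): for

     int a[10];
     p_2 = &a p+ 8;

   maybe_fold_stmt_addition rewrites the RHS as &a[2], a valid GIMPLE rhs
   that is returned to the caller.  A fold_binary result that is not valid
   GIMPLE is dropped; at most the commutative operands are swapped into
   canonical order.  */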
2760 /* Attempt to fold a conditional statement. Return true if any changes were
2761 made. We only attempt to fold the condition expression, and do not perform
2762 any transformation that would require alteration of the cfg. It is
2763 assumed that the operands have been previously folded. */
2765 static bool
2766 fold_gimple_cond (gimple stmt)
2768 tree result = fold_binary (gimple_cond_code (stmt),
2769 boolean_type_node,
2770 gimple_cond_lhs (stmt),
2771 gimple_cond_rhs (stmt));
2773 if (result)
2775 STRIP_USELESS_TYPE_CONVERSION (result);
2776 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
2778 gimple_cond_set_condition_from_tree (stmt, result);
2779 return true;
2783 return false;
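/* Constructed example: a GIMPLE_COND such as

     if (4 > 5) goto <L1>; else goto <L2>;

   has its predicate folded by fold_binary to a false constant, which
   passes the is_gimple_condexpr and valid_gimple_rhs_p checks, so the
   condition is rewritten in place as "if (0)"; removing the dead edge is
   left to later CFG cleanup.  */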
2787 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2788 The statement may be replaced by another statement, e.g., if the call
2789 simplifies to a constant value. Return true if any changes were made.
2790 It is assumed that the operands have been previously folded. */
2792 static bool
2793 fold_gimple_call (gimple_stmt_iterator *gsi)
2795 gimple stmt = gsi_stmt (*gsi);
2797 tree callee = gimple_call_fndecl (stmt);
2799 /* Check for builtins that CCP can handle using information not
2800 available in the generic fold routines. */
2801 if (callee && DECL_BUILT_IN (callee))
2803 tree result = ccp_fold_builtin (stmt);
2805 if (result)
2806 return update_call_from_tree (gsi, result);
2808 else
2810 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
2811 here are when we've propagated the address of a decl into the
2812 object slot. */
2813 /* ??? Should perhaps do this in fold proper. However, doing it
2814 there requires that we create a new CALL_EXPR, and that requires
2815 copying EH region info to the new node. Easier to just do it
2816 here where we can just smash the call operand. */
2817 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
2818 callee = gimple_call_fn (stmt);
2819 if (TREE_CODE (callee) == OBJ_TYPE_REF
2820 && lang_hooks.fold_obj_type_ref
2821 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2822 && DECL_P (TREE_OPERAND
2823 (OBJ_TYPE_REF_OBJECT (callee), 0)))
2825 tree t;
2827 /* ??? Caution: Broken ADDR_EXPR semantics means that
2828 looking at the type of the operand of the addr_expr
2829 can yield an array type. See silly exception in
2830 check_pointer_types_r. */
2831 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2832 t = lang_hooks.fold_obj_type_ref (callee, t);
2833 if (t)
2835 gimple_call_set_fn (stmt, t);
2836 return true;
2841 return false;
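/* Hypothetical C++ example for the OBJ_TYPE_REF case: given

     struct B { virtual int f (); } b;

   once the address &b has been propagated into the object slot of a
   virtual call, the call operand is an OBJ_TYPE_REF whose object is the
   ADDR_EXPR of a known decl, and lang_hooks.fold_obj_type_ref can replace
   the indirect callee with the address of B::f.  */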
2844 /* Fold the statement pointed to by GSI. In some cases, this function may
2845 replace the whole statement with a new one. Returns true iff folding
2846 makes any changes. */
2848 bool
2849 fold_stmt (gimple_stmt_iterator *gsi)
2851 tree res;
2852 struct fold_stmt_r_data fold_stmt_r_data;
2853 struct walk_stmt_info wi;
2855 bool changed = false;
2856 bool inside_addr_expr = false;
2858 gimple stmt = gsi_stmt (*gsi);
2860 fold_stmt_r_data.stmt = stmt;
2861 fold_stmt_r_data.changed_p = &changed;
2862 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
2864 memset (&wi, 0, sizeof (wi));
2865 wi.info = &fold_stmt_r_data;
2867 /* Fold the individual operands.
2868 For example, fold instances of *&VAR into VAR, etc. */
2869 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2870 gcc_assert (!res);
2872 /* Fold the main computation performed by the statement. */
2873 switch (gimple_code (stmt))
2875 case GIMPLE_ASSIGN:
2877 tree new_rhs = fold_gimple_assign (gsi);
2878 if (new_rhs != NULL_TREE)
2880 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
2881 changed = true;
2883 stmt = gsi_stmt (*gsi);
2884 break;
2886 case GIMPLE_COND:
2887 changed |= fold_gimple_cond (stmt);
2888 break;
2889 case GIMPLE_CALL:
2890 /* The entire statement may be replaced in this case. */
2891 changed |= fold_gimple_call (gsi);
2892 break;
2894 default:
2895 return changed;
2896 break;
2899 return changed;
2902 /* Perform the minimal folding on statement STMT. Only operations like
2903 *&x created by constant propagation are handled. The statement cannot
2904 be replaced with a new one. Return true if the statement was
2905 changed, false otherwise. */
2907 bool
2908 fold_stmt_inplace (gimple stmt)
2910 tree res;
2911 struct fold_stmt_r_data fold_stmt_r_data;
2912 struct walk_stmt_info wi;
2913 gimple_stmt_iterator si;
2915 bool changed = false;
2916 bool inside_addr_expr = false;
2918 fold_stmt_r_data.stmt = stmt;
2919 fold_stmt_r_data.changed_p = &changed;
2920 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
2922 memset (&wi, 0, sizeof (wi));
2923 wi.info = &fold_stmt_r_data;
2925 /* Fold the individual operands.
2926 For example, fold instances of *&VAR into VAR, etc.
2928 It appears that, at one time, maybe_fold_stmt_indirect
2929 would cause the walk to return non-null in order to
2930 signal that the entire statement should be replaced with
2931 a call to __builtin_trap. This functionality is currently
2932 disabled, as noted in a FIXME, and cannot be supported here. */
2933 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2934 gcc_assert (!res);
2936 /* Fold the main computation performed by the statement. */
2937 switch (gimple_code (stmt))
2939 case GIMPLE_ASSIGN:
2941 unsigned old_num_ops;
2942 tree new_rhs;
2943 old_num_ops = gimple_num_ops (stmt);
2944 si = gsi_for_stmt (stmt);
2945 new_rhs = fold_gimple_assign (&si);
2946 if (new_rhs != NULL_TREE
2947 && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
2949 gimple_assign_set_rhs_from_tree (&si, new_rhs);
2950 changed = true;
2952 gcc_assert (gsi_stmt (si) == stmt);
2953 break;
2955 case GIMPLE_COND:
2956 changed |= fold_gimple_cond (stmt);
2957 break;
2959 default:
2960 break;
2963 return changed;
2966 /* Try to optimize out __builtin_stack_restore. Optimize it out
2967 if there is another __builtin_stack_restore in the same basic
2968 block and no calls or ASM_EXPRs are in between, or if this block's
2969 only outgoing edge is to EXIT_BLOCK and there are no calls or
2970 ASM_EXPRs after this __builtin_stack_restore. */
2972 static tree
2973 optimize_stack_restore (gimple_stmt_iterator i)
2975 tree callee, rhs;
2976 gimple stmt, stack_save;
2977 gimple_stmt_iterator stack_save_gsi;
2979 basic_block bb = gsi_bb (i);
2980 gimple call = gsi_stmt (i);
2982 if (gimple_code (call) != GIMPLE_CALL
2983 || gimple_call_num_args (call) != 1
2984 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2985 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
2986 return NULL_TREE;
2988 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
2990 stmt = gsi_stmt (i);
2991 if (gimple_code (stmt) == GIMPLE_ASM)
2992 return NULL_TREE;
2993 if (gimple_code (stmt) != GIMPLE_CALL)
2994 continue;
2996 callee = gimple_call_fndecl (stmt);
2997 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
2998 return NULL_TREE;
3000 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
3001 break;
3004 if (gsi_end_p (i)
3005 && (! single_succ_p (bb)
3006 || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
3007 return NULL_TREE;
3009 stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
3010 if (gimple_code (stack_save) != GIMPLE_CALL
3011 || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
3012 || stmt_could_throw_p (stack_save)
3013 || !has_single_use (gimple_call_arg (call, 0)))
3014 return NULL_TREE;
3016 callee = gimple_call_fndecl (stack_save);
3017 if (!callee
3018 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3019 || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
3020 || gimple_call_num_args (stack_save) != 0)
3021 return NULL_TREE;
3023 stack_save_gsi = gsi_for_stmt (stack_save);
3024 push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3025 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
3026 if (!update_call_from_tree (&stack_save_gsi, rhs))
3028 discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3029 return NULL_TREE;
3031 pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3033 /* No effect, so the statement will be deleted. */
3034 return integer_zero_node;
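/* Typical situation this targets (sketch): a variable-length array inside
   a loop body,

     for (i = 0; i < n; i++)
       {
         char vla[m];
         use (vla);
       }

   produces one __builtin_stack_save / __builtin_stack_restore pair per
   iteration.  When a restore is followed in its block only by another
   restore (or by the function exit) with no call or asm in between, the
   restore folds to nothing and its paired __builtin_stack_save is
   replaced by a null-pointer assignment, so both become dead.  */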
3037 /* If va_list type is a simple pointer and nothing special is needed,
3038 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
3039 __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
3040 pointer assignment. */
3042 static tree
3043 optimize_stdarg_builtin (gimple call)
3045 tree callee, lhs, rhs, cfun_va_list;
3046 bool va_list_simple_ptr;
3048 if (gimple_code (call) != GIMPLE_CALL)
3049 return NULL_TREE;
3051 callee = gimple_call_fndecl (call);
3053 cfun_va_list = targetm.fn_abi_va_list (callee);
3054 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
3055 && (TREE_TYPE (cfun_va_list) == void_type_node
3056 || TREE_TYPE (cfun_va_list) == char_type_node);
3058 switch (DECL_FUNCTION_CODE (callee))
3060 case BUILT_IN_VA_START:
3061 if (!va_list_simple_ptr
3062 || targetm.expand_builtin_va_start != NULL
3063 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
3064 return NULL_TREE;
3066 if (gimple_call_num_args (call) != 2)
3067 return NULL_TREE;
3069 lhs = gimple_call_arg (call, 0);
3070 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3071 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3072 != TYPE_MAIN_VARIANT (cfun_va_list))
3073 return NULL_TREE;
3075 lhs = build_fold_indirect_ref (lhs);
3076 rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
3077 1, integer_zero_node);
3078 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3079 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3081 case BUILT_IN_VA_COPY:
3082 if (!va_list_simple_ptr)
3083 return NULL_TREE;
3085 if (gimple_call_num_args (call) != 2)
3086 return NULL_TREE;
3088 lhs = gimple_call_arg (call, 0);
3089 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3090 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3091 != TYPE_MAIN_VARIANT (cfun_va_list))
3092 return NULL_TREE;
3094 lhs = build_fold_indirect_ref (lhs);
3095 rhs = gimple_call_arg (call, 1);
3096 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
3097 != TYPE_MAIN_VARIANT (cfun_va_list))
3098 return NULL_TREE;
3100 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3101 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3103 case BUILT_IN_VA_END:
3104 /* No effect, so the statement will be deleted. */
3105 return integer_zero_node;
3107 default:
3108 gcc_unreachable ();
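/* Sketch for targets whose va_list is a plain char * or void * pointer:

     va_start (ap, last);   ->   ap = __builtin_next_arg (0);
     va_copy (dst, src);    ->   dst = src;
     va_end (ap);           ->   deleted (no effect)

   matching the three cases handled above.  */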
3112 /* Convert EXPR into a GIMPLE value suitable for substitution on the
3113 RHS of an assignment. Insert the necessary statements before
3114 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
3115 is replaced. If the call is expected to produce a result, then it
3116 is replaced by an assignment of the new RHS to the result variable.
3117 If the result is to be ignored, then the call is replaced by a
3118 GIMPLE_NOP. */
3120 static void
3121 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
3123 tree lhs;
3124 tree tmp = NULL_TREE; /* Silence warning. */
3125 gimple stmt, new_stmt;
3126 gimple_stmt_iterator i;
3127 gimple_seq stmts = gimple_seq_alloc ();
3128 struct gimplify_ctx gctx;
3130 stmt = gsi_stmt (*si_p);
3132 gcc_assert (is_gimple_call (stmt));
3134 lhs = gimple_call_lhs (stmt);
3136 push_gimplify_context (&gctx);
3138 if (lhs == NULL_TREE)
3139 gimplify_and_add (expr, &stmts);
3140 else
3141 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
3143 pop_gimplify_context (NULL);
3145 if (gimple_has_location (stmt))
3146 annotate_all_with_location (stmts, gimple_location (stmt));
3148 /* The replacement can expose previously unreferenced variables. */
3149 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
3151 new_stmt = gsi_stmt (i);
3152 find_new_referenced_vars (new_stmt);
3153 gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
3154 mark_symbols_for_renaming (new_stmt);
3155 gsi_next (si_p);
3158 if (lhs == NULL_TREE)
3159 new_stmt = gimple_build_nop ();
3160 else
3162 new_stmt = gimple_build_assign (lhs, tmp);
3163 copy_virtual_operands (new_stmt, stmt);
3164 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
3167 gimple_set_location (new_stmt, gimple_location (stmt));
3168 gsi_replace (si_p, new_stmt, false);
3171 /* A simple pass that attempts to fold all builtin functions. This pass
3172 is run after we've propagated as many constants as we can. */
3174 static unsigned int
3175 execute_fold_all_builtins (void)
3177 bool cfg_changed = false;
3178 basic_block bb;
3179 unsigned int todoflags = 0;
3181 FOR_EACH_BB (bb)
3183 gimple_stmt_iterator i;
3184 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
3186 gimple stmt, old_stmt;
3187 tree callee, result;
3188 enum built_in_function fcode;
3190 stmt = gsi_stmt (i);
3192 if (gimple_code (stmt) != GIMPLE_CALL)
3194 gsi_next (&i);
3195 continue;
3197 callee = gimple_call_fndecl (stmt);
3198 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3200 gsi_next (&i);
3201 continue;
3203 fcode = DECL_FUNCTION_CODE (callee);
3205 result = ccp_fold_builtin (stmt);
3207 if (result)
3208 gimple_remove_stmt_histograms (cfun, stmt);
3210 if (!result)
3211 switch (DECL_FUNCTION_CODE (callee))
3213 case BUILT_IN_CONSTANT_P:
3214 /* Resolve __builtin_constant_p. If it hasn't been
3215 folded to integer_one_node by now, it's fairly
3216 certain that the value simply isn't constant. */
3217 result = integer_zero_node;
3218 break;
3220 case BUILT_IN_STACK_RESTORE:
3221 result = optimize_stack_restore (i);
3222 if (result)
3223 break;
3224 gsi_next (&i);
3225 continue;
3227 case BUILT_IN_VA_START:
3228 case BUILT_IN_VA_END:
3229 case BUILT_IN_VA_COPY:
3230 /* These shouldn't be folded before pass_stdarg. */
3231 result = optimize_stdarg_builtin (stmt);
3232 if (result)
3233 break;
3234 /* FALLTHRU */
3236 default:
3237 gsi_next (&i);
3238 continue;
3241 if (dump_file && (dump_flags & TDF_DETAILS))
3243 fprintf (dump_file, "Simplified\n ");
3244 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3247 old_stmt = stmt;
3248 push_stmt_changes (gsi_stmt_ptr (&i));
3250 if (!update_call_from_tree (&i, result))
3252 gimplify_and_update_call_from_tree (&i, result);
3253 todoflags |= TODO_rebuild_alias;
3256 stmt = gsi_stmt (i);
3257 pop_stmt_changes (gsi_stmt_ptr (&i));
3259 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3260 && gimple_purge_dead_eh_edges (bb))
3261 cfg_changed = true;
3263 if (dump_file && (dump_flags & TDF_DETAILS))
3265 fprintf (dump_file, "to\n ");
3266 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3267 fprintf (dump_file, "\n");
3270 /* Retry the same statement if it changed into another
3271 builtin, there might be new opportunities now. */
3272 if (gimple_code (stmt) != GIMPLE_CALL)
3274 gsi_next (&i);
3275 continue;
3277 callee = gimple_call_fndecl (stmt);
3278 if (!callee
3279 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3280 || DECL_FUNCTION_CODE (callee) == fcode)
3281 gsi_next (&i);
3285 /* Delete unreachable blocks. */
3286 if (cfg_changed)
3287 todoflags |= TODO_cleanup_cfg;
3289 return todoflags;
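/* Example of what this late pass still catches (hypothetical): a
   __builtin_constant_p (x) call that was not resolved earlier folds to 0
   here, and a string builtin whose argument length only became known
   through propagation is simplified via ccp_fold_builtin; if the
   replacement is not a valid GIMPLE call replacement, it is gimplified
   and inserted by gimplify_and_update_call_from_tree.  */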
3293 struct gimple_opt_pass pass_fold_builtins =
3296 GIMPLE_PASS,
3297 "fab", /* name */
3298 NULL, /* gate */
3299 execute_fold_all_builtins, /* execute */
3300 NULL, /* sub */
3301 NULL, /* next */
3302 0, /* static_pass_number */
3303 0, /* tv_id */
3304 PROP_cfg | PROP_ssa, /* properties_required */
3305 0, /* properties_provided */
3306 0, /* properties_destroyed */
3307 0, /* todo_flags_start */
3308 TODO_dump_func
3309 | TODO_verify_ssa
3310 | TODO_update_ssa /* todo_flags_finish */