/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED	->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.
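
   As a small illustrative example (not taken from the original
   sources), consider the GIMPLE sequence

	x_1 = 4;
	y_2 = x_1 + 2;
	if (y_2 > 10)
	  ...

   Visiting the first assignment moves x_1 from UNDEFINED to CONSTANT
   4.  That puts the uses of x_1 on the worklist; folding the second
   assignment then makes y_2 CONSTANT 6, and the predicate 'y_2 > 10'
   folds to false, so only the else edge is marked executable.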
   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      are visited as well.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.
   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

		if (PRED)
		  a_9 = 3;
		else
		  a_10 = 100;
		a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.
      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.
   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

	  struct A a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      never_executed ();
	  }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

	# a_5 = VDEF <a_4>
	a.a = 2;

	# VUSE <a_5>
	x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.
   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;
	  # A_5 = PHI (A_4, A_2);

	  # VUSE <A_5>
	  A.0_6 = A;

	  return A.0_6;
	}

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.
   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.
   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "basic-block.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "toplev.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}
/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  STRIP_USELESS_TYPE_CONVERSION (val);
	  if (is_gimple_min_invariant (val))
	    return val;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && targetm.binds_local_p (sym)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */
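/* For instance (an illustrative case, not from the original sources):
   for 'const int x = 42;' at file scope, the default value of an SSA
   name of 'x' is CONSTANT 42 by rule 1, while the SSA name of an
   uninitialized local variable defaults to UNDEFINED by rule 2.  */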
static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  tree cst_val;

  if (!is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
	 non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
    {
      /* Globals and static variables declared 'const' take their
	 initial value.  */
      val.lattice_val = CONSTANT;
      val.value = cst_val;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (var);

      if (gimple_nop_p (stmt))
	{
	  /* Variables defined by an empty statement are those used
	     before being initialized.  If VAR is a local variable, we
	     can assume initially that it is UNDEFINED, otherwise we must
	     consider it VARYING.  */
	  if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	    val.lattice_val = UNDEFINED;
	  else
	    val.lattice_val = VARYING;
	}
      else if (is_gimple_assign (stmt)
	       /* Value-returning GIMPLE_CALL statements assign to
		  a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	       || (is_gimple_call (stmt)
		   && gimple_call_lhs (stmt) != NULL_TREE)
	       || gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Any other variable defined by an assignment or a PHI node
	     is considered UNDEFINED.  */
	  val.lattice_val = UNDEFINED;
	}
      else
	{
	  /* Otherwise, VAR will never take on a constant value.  */
	  val.lattice_val = VARYING;
	}
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

	x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */
static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
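/* For instance (an illustrative case, not from the original sources):
   for 'z_4 = x_2 + y_3' where x_2 is CONSTANT 3 and y_3 is VARYING,
   we return CONSTANT, because folding with one constant operand may
   still succeed; if the fold fails, evaluate_stmt falls back to
   VARYING.  If both operands are UNDEFINED, we return UNDEFINED.  */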
static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return VARYING;

  /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
     is_gimple_min_invariant, so we do not consider calls or
     other forms of assignment.  */
  if (gimple_assign_single_p (stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
    return CONSTANT;

  if (code == GIMPLE_COND
      && is_gimple_min_invariant (gimple_cond_lhs (stmt))
      && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
    return CONSTANT;

  if (code == GIMPLE_SWITCH
      && is_gimple_min_invariant (gimple_switch_index (stmt)))
    return CONSTANT;

  /* Arrive here for more complex cases.  */

  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  if (has_constant_operand
      /* We do not consider virtual operands here -- load from read-only
	 memory may have only VARYING virtual operands, but still be
	 constant.  */
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
    return CONSTANT;

  return VARYING;
}
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  bool something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;

  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)   */
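/* A few concrete instances of the rules above (illustrative only):

	UNDEFINED  M CONSTANT 4   = CONSTANT 4
	CONSTANT 4 M CONSTANT 4   = CONSTANT 4
	CONSTANT 4 M CONSTANT 5   = VARYING
	CONSTANT 4 M VARYING      = VARYING  */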
static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any.  */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      val1->value = val1->value;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */
static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
	      "\n    Argument #%d (%d -> %d %sexecutable)\n",
	      i, e->src->index, e->dest->index,
	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */
static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return get_value (rhs)->value;
		}
	      /* Handle propagating invariant addresses into address operations.
		 The folding we do here matches that in tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && may_propagate_address_into_dereference
			       (val->value, *base))
			{
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  tree ret, save = *base;
			  *base = TREE_OPERAND (val->value, 0);
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}

	      if (kind == tcc_reference)
		{
		  if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT)
			return fold_unary (VIEW_CONVERT_EXPR,
					   TREE_TYPE (rhs), val->value);
		    }
		  return fold_const_aggregate_ref (rhs);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = gimple_assign_rhs1 (stmt);
	      tree res;

	      /* Simplify the operand down to a constant.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = get_value (op0)->value;
		}

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  /* Do not allow differences in volatile qualification
		     as this might get us confused as to whether a
		     propagation destination statement is volatile
		     or not.  See PR36988.  */
		  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
		      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
		{
		  tree tem;
		  /* Still try to generate a constant of correct type.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
			     (op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      res = fold_unary (subcode, gimple_expr_type (stmt), op0);

	      /* If the operation was a conversion do _not_ mark a
		 resulting constant with TREE_OVERFLOW if the original
		 constant was not.  These conversions have implementation
		 defined behavior and retaining the TREE_OVERFLOW flag
		 here would confuse later passes such as VRP.  */
	      if (res
		  && TREE_CODE (res) == INTEGER_CST
		  && TREE_CODE (op0) == INTEGER_CST
		  && CONVERT_EXPR_CODE_P (subcode))
		TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

	      return res;
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = gimple_assign_rhs1 (stmt);
	      tree op1 = gimple_assign_rhs2 (stmt);

	      /* Simplify the operands down to constants when appropriate.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      if (TREE_CODE (op1) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op1);
		  if (val->lattice_val == CONSTANT)
		    op1 = val->value;
		}

	      /* Fold &foo + CST into an invariant reference if possible.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree tem = maybe_fold_offset_to_address (op0, op1,
							   TREE_TYPE (lhs));
		  if (tem != NULL_TREE)
		    return tem;
		}

	      return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    case GIMPLE_CALL:
      {
	tree fn = gimple_call_fn (stmt);
	prop_value_t *val;

	if (TREE_CODE (fn) == SSA_NAME)
	  {
	    val = get_value (fn);
	    if (val->lattice_val == CONSTANT)
	      fn = val->value;
	  }
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      {
		args[i] = gimple_call_arg (stmt, i);
		if (TREE_CODE (args[i]) == SSA_NAME)
		  {
		    val = get_value (args[i]);
		    if (val->lattice_val == CONSTANT)
		      args[i] = val->value;
		  }
	      }
	    call = build_call_array (gimple_call_return_type (stmt),
				     fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = gimple_cond_lhs (stmt);
	tree op1 = gimple_cond_rhs (stmt);
	enum tree_code code = gimple_cond_code (stmt);

	/* Simplify the operands down to constants when appropriate.  */
	if (TREE_CODE (op0) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op0);
	    if (val->lattice_val == CONSTANT)
	      op0 = val->value;
	  }

	if (TREE_CODE (op1) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op1);
	    if (val->lattice_val == CONSTANT)
	      op1 = val->value;
	  }

	return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	tree rhs = gimple_switch_index (stmt);

	if (TREE_CODE (rhs) == SSA_NAME)
	  {
	    /* If the RHS is an SSA_NAME, return its known constant value,
	       if any.  */
	    return get_value (rhs)->value;
	  }

	return rhs;
      }

    default:
      gcc_unreachable ();
    }
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */
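/* For instance (hypothetical input), given

	static const int tbl[3] = { 10, 20, 30 };

   a read of 'tbl[2]' folds to the INTEGER_CST 30, and 'tbl[i_1]'
   folds the same way when the lattice value of i_1 is CONSTANT 2.  */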
tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    return cval;
	  }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    return cval;
	  }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case INDIRECT_REF:
      {
	tree base = TREE_OPERAND (t, 0);
	if (TREE_CODE (base) == SSA_NAME
	    && (value = get_value (base))
	    && value->lattice_val == CONSTANT
	    && TREE_CODE (value->value) == ADDR_EXPR)
	  return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:
	  break;
	}
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  /* For a simple copy operation, we copy the lattice values.  */
	  prop_value_t *nval = get_value (rhs);
	  val = *nval;
	}
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
      case GIMPLE_ASSIGN:
	/* If the statement is an assignment that produces a single
	   output value, evaluate its RHS to see if the lattice value of
	   its output has changed.  */
	return visit_assignment (stmt, output_p);

      case GIMPLE_CALL:
	/* A value-returning call also performs an assignment.  */
	if (gimple_call_lhs (stmt) != NULL_TREE)
	  return visit_assignment (stmt, output_p);
	break;

      case GIMPLE_COND:
      case GIMPLE_SWITCH:
	/* If STMT is a conditional branch, see if we can determine
	   which branch will be taken.  */
	/* FIXME.  It appears that we should be able to optimize
	   computed GOTOs here as well.  */
	return visit_cond_stmt (stmt, taken_edge_p);

      default:
	break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}
struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
 }
};
/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
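/* For instance (hypothetical), with 'int a[10]' as BASE, OFFSET 8 and
   ORIG_TYPE 'int', the result is the ARRAY_REF 'a[2]' on a target
   where 'int' is 4 bytes wide.  An OFFSET that is not an exact
   multiple of the element size makes the division below inexact, and
   no folding is done.  */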
static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
				bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds. For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should be not folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
	  /* Accesses after the end of arrays of size 0 (gcc
	     extension) and 1 are likely intentional ("struct
	     hack").  */
	  && compare_tree_int (up_bound, 1) > 0)
	return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
	  && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
    }
  else if (!allow_negative_idx
	   && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}
/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
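/* For instance (hypothetical), with

	struct S { int x; int y; } s;

   folding the access '*(int *)((char *)&s + 4)' yields the
   COMPONENT_REF 's.y' on a target where 'int' is 4 bytes wide and
   'struct S' has no internal padding.  */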
static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret;
  tree new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      if (!DECL_FIELD_OFFSET (f))
	continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && useless_type_conversion_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used as for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      if (base_is_ptr)
	new_base = build1 (INDIRECT_REF, record_type, base);
      else
	new_base = base;
      new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
					    f == TYPE_FIELDS (record_type));
      if (ret)
	return ret;
      ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
						orig_type, false);
      if (ret)
	return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
				      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */
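/* For instance (hypothetical), given 'struct S { int x; int y; } *p;',
   BASE 'p', OFFSET 4 and ORIG_TYPE 'int', this can rewrite the access
   as 'p->y' (via an INDIRECT_REF of P), again assuming a 4-byte 'int'
   and no padding.  */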
static tree
maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
{
  tree ret;
  tree type;
  bool base_is_ptr = true;

  STRIP_NOPS (base);
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base_is_ptr = false;

      base = TREE_OPERAND (base, 0);

      /* Handle case where existing COMPONENT_REF picks e.g. wrong field of union,
	 so it needs to be removed and new COMPONENT_REF constructed.
	 The wrong COMPONENT_REF are often constructed by folding the
	 (type *)&object within the expression (type *)&object+offset  */
      if (handled_component_p (base))
	{
	  HOST_WIDE_INT sub_offset, size, maxsize;
	  tree newbase;
	  newbase = get_ref_base_and_extent (base, &sub_offset,
					     &size, &maxsize);
	  gcc_assert (newbase);
	  if (size == maxsize
	      && !(sub_offset & (BITS_PER_UNIT - 1)))
	    {
	      base = newbase;
	      if (sub_offset)
		offset = int_const_binop (PLUS_EXPR, offset,
					  build_int_cst (TREE_TYPE (offset),
							 sub_offset
							 / BITS_PER_UNIT), 1);
	    }
	}
      if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
	  && integer_zerop (offset))
	return base;
      type = TREE_TYPE (base);
    }
  else
    {
      base_is_ptr = true;
      if (!POINTER_TYPE_P (TREE_TYPE (base)))
	return NULL_TREE;
      type = TREE_TYPE (TREE_TYPE (base));
    }
  ret = maybe_fold_offset_to_component_ref (type, base, offset,
					    orig_type, base_is_ptr);
  if (!ret)
    {
      if (base_is_ptr)
	base = build1 (INDIRECT_REF, type, base);
      ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
    }
  return ret;
}
/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by combination of those.

   Before attempting the conversion strip off existing component refs.  */
tree
maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
{
  tree t;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
	      && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
  if (t != NULL_TREE)
    {
      tree orig = addr;
      tree ptr_type;

      /* For __builtin_object_size to function correctly we need to
	 make sure not to fold address arithmetic so that we change
	 reference from one array to another.  This would happen for
	 example for

	   struct X { char s1[10]; char s2[10] } s;
	   char *foo (void) { return &s.s2[-4]; }

	 where we need to avoid generating &s.s1[6].  As the C and
	 C++ frontends create different initial trees
	 (char *) &s.s1 + -4  vs.  &s.s1[-4]  we have to do some
	 sophisticated comparisons here.  Note that checking for the
	 condition after the fact is easier than trying to avoid doing
	 the folding.  */
      STRIP_NOPS (orig);
      if (TREE_CODE (orig) == ADDR_EXPR)
	orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
	   || (TREE_CODE (orig) == COMPONENT_REF
	       && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
	  && (TREE_CODE (t) == ARRAY_REF
	      || (TREE_CODE (t) == COMPONENT_REF
		  && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1))) == ARRAY_TYPE))
	  && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
			       ? TREE_OPERAND (orig, 0) : orig,
			       TREE_CODE (t) == ARRAY_REF
			       ? TREE_OPERAND (t, 0) : t, 0))
	return NULL_TREE;

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
	return NULL_TREE;
      return build_fold_addr_expr_with_type (t, ptr_type);
    }

  return NULL_TREE;
}
/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */
static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
			     int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
	  && is_gimple_min_invariant (DECL_INITIAL (base)))
	return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (base_addr, offset,
					  TREE_TYPE (expr));
      if (t)
	{
	  /* Preserve volatileness of the original expression.
	     We can end up with a plain decl here which is shared
	     and we shouldn't mess with its flags.  */
	  if (!SSA_VAR_P (t))
	    TREE_THIS_VOLATILE (t) = volatile_p;
	  return t;
	}
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we die in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_reference (base, offset,
					      TREE_TYPE (expr));
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */
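/* For instance (hypothetical), with 'int a[10]', OP0 '&a', OP1 8 and
   RES_TYPE 'int *', the result is '&a[2]' on a target with 4-byte
   'int'.  */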
static tree
maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
					    ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, res_type, t);

  return t;
}
/* For passing state through walk_tree into fold_stmt_r and its
   callbacks.  */

struct fold_stmt_r_data
{
  gimple stmt;
  bool *changed_p;
  bool *inside_addr_expr_p;
};
2170 /* Subroutine of fold_stmt called via walk_tree. We perform several
2171 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
2174 fold_stmt_r (tree
*expr_p
, int *walk_subtrees
, void *data
)
2176 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
2177 struct fold_stmt_r_data
*fold_stmt_r_data
;
2178 bool *inside_addr_expr_p
;
2180 tree expr
= *expr_p
, t
;
2181 bool volatile_p
= TREE_THIS_VOLATILE (expr
);
2183 fold_stmt_r_data
= (struct fold_stmt_r_data
*) wi
->info
;
2184 inside_addr_expr_p
= fold_stmt_r_data
->inside_addr_expr_p
;
2185 changed_p
= fold_stmt_r_data
->changed_p
;
  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
                                    integer_zero_node);
      /* Avoid folding *"abc" = 5 into 'a' = 5.  */
      if (wi->is_lhs && t && TREE_CODE (t) == INTEGER_CST)
        t = NULL_TREE;
      if (!t
          && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        /* If we had a good reason for propagating the address here,
           make sure we end up with valid gimple.  See PR34989.  */
        t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
      break;
    case NOP_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      if (POINTER_TYPE_P (TREE_TYPE (expr))
          && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
          && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
                                                integer_zero_node,
                                                TREE_TYPE (TREE_TYPE (expr)))))
        return t;
      break;
      /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
         We'd only want to bother decomposing an existing ARRAY_REF if
         the base array is found to have another offset contained within.
         Otherwise we'd be wasting time.  */
    case ARRAY_REF:
      /* If we are not processing expressions found within an
         ADDR_EXPR, then we can fold constant array references.
         Don't fold on LHS either, to avoid folding "abc"[0] = 5
         to 'a' = 5.  */
      if (!*inside_addr_expr_p && !wi->is_lhs)
        t = fold_read_from_constant_string (expr);
      else
        t = NULL;
      break;
    case ADDR_EXPR:
      *inside_addr_expr_p = true;
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      *inside_addr_expr_p = false;
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the value is properly considered constant, and so gets
         propagated as expected.  */
      if (*changed_p)
        recompute_tree_invariant_for_addr_expr (expr);
      return NULL_TREE;
    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
         We've already checked that the records are compatible, so we should
         come up with a set of compatible fields.  */
      {
        tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
        tree expr_field = TREE_OPERAND (expr, 1);

        if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
          {
            expr_field = find_compatible_field (expr_record, expr_field);
            TREE_OPERAND (expr, 1) = expr_field;
          }
      }
      break;
    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;
    case POINTER_PLUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (TREE_TYPE (expr),
                                    TREE_OPERAND (expr, 0),
                                    TREE_OPERAND (expr, 1));
      break;
    case COND_EXPR:
      if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
        {
          tree op0 = TREE_OPERAND (expr, 0);
          tree tem;
          bool set;

          fold_defer_overflow_warnings ();
          tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
                             TREE_OPERAND (op0, 0),
                             TREE_OPERAND (op0, 1));
          /* This is actually a conditional expression, not a GIMPLE
             conditional statement, however, the valid_gimple_rhs_p
             test still applies.  */
          set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
          fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
          if (set)
            COND_EXPR_COND (expr) = tem;
        }
      t = NULL_TREE;
      break;

    default:
      return NULL_TREE;
    }
  if (t)
    {
      /* Preserve volatileness of the original expression.
         We can end up with a plain decl here which is shared
         and we shouldn't mess with its flags.  */
      if (!SSA_VAR_P (t))
        TREE_THIS_VOLATILE (t) = volatile_p;
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
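/* Illustrative examples of the simplifications above (the GIMPLE and
   SSA names are invented):

     t_1 = *&x;        becomes   t_1 = x;
     c_2 = "abc"[1];   becomes   c_2 = 98;    (i.e. 'b', via
                                 fold_read_from_constant_string)

   whereas "abc"[0] = 5 and *"abc" = 5 are deliberately left alone,
   as noted in the LHS checks above.  */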
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited,
                                     type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }
  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
        return false;
      case GIMPLE_PHI:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
          unsigned i;

          for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
            {
              tree arg = gimple_phi_arg (def_stmt, i)->def;

              /* If this PHI has itself as an argument, we cannot
                 determine the string length of this argument.  However,
                 if we can find a constant string length for the other
                 PHI args then we can still be sure that this is a
                 constant string length.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == gimple_phi_result (def_stmt))
                continue;

              if (!get_maxval_strlen (arg, length, visited, type))
                return false;
            }
        }
        return true;

      default:
        return false;
    }
}
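/* Illustrative example (hypothetical SSA graph): for

     s_1 = PHI <&"foo"[0], &"bar"[0]>
     len_2 = strlen (s_1);

   both PHI arguments have string length 3, so with TYPE == 0 the
   walk above would be expected to succeed with *LENGTH == 3,
   letting the strlen call be folded to the constant 3.  */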
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
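/* For instance (an illustrative case, not an exhaustive list): once
   the source length is known, a call

     strcpy (dst, "hello");

   may come back rewritten as

     memcpy (dst, "hello", 6);

   copying the terminating nul as well; the result is a new call,
   not a constant.  */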
static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);
  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }
  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;
  /* If the builtin could not be folded, and it has no argument list,
     we can't do anything.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;
  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;
  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we can not use the result.  */
          if (is_gimple_val (new_val)
              || (is_gimple_cast (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;
    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;
    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;
    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;
    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;
    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2]);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  tree result = NULL_TREE;
  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
          {
            tree temp = fold (COND_EXPR_COND (rhs));
            if (temp != COND_EXPR_COND (rhs))
              result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
                                    COND_EXPR_THEN (rhs),
                                    COND_EXPR_ELSE (rhs));
          }

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;
        else
          /* It is possible that fold_stmt_r simplified the RHS.
             Make sure that the subcode of this statement still
             reflects the principal operator of the rhs operand.  */
          return NULL_TREE;
      }
    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
        else if (CONVERT_EXPR_CODE_P (subcode)
                 && POINTER_TYPE_P (gimple_expr_type (stmt))
                 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
          {
            tree type = gimple_expr_type (stmt);
            tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
                                                   integer_zero_node, type);
            if (t)
              return t;
          }
      }
      break;
    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
        {
          tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
            {
              type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
              if (!useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
                type = TREE_TYPE (gimple_assign_rhs1 (stmt));
            }

          result = maybe_fold_stmt_addition (type,
                                             gimple_assign_rhs1 (stmt),
                                             gimple_assign_rhs2 (stmt));
        }
      if (!result)
        result = fold_binary (subcode,
                              TREE_TYPE (gimple_assign_lhs (stmt)),
                              gimple_assign_rhs1 (stmt),
                              gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;

          /* Fold might have produced non-GIMPLE, so if we trust it blindly
             we lose canonicalization opportunities.  Do not go again
             through fold here though, or the same non-GIMPLE will be
             produced.  */
          if (commutative_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
                           gimple_assign_rhs2 (stmt),
                           gimple_assign_rhs1 (stmt));
        }
      break;
    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
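/* Illustrative example (statement invented for the sketch): once CCP
   has substituted constants into an assignment such as

     x_1 = 3 * 4;

   the fold_binary call above is expected to yield the replacement
   RHS 12, which the caller then installs in the statement.  */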
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary (gimple_cond_code (stmt),
                             boolean_type_node,
                             gimple_cond_lhs (stmt),
                             gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
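/* Illustrative example (condition invented for the sketch): if earlier
   propagation has rewritten a conditional to

     if (5 != 0)

   fold_binary reduces the predicate to the boolean constant 1 and the
   condition is reset accordingly; the CFG itself is not touched here,
   so removing the dead edge is left to later cleanups.  */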
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
        return update_call_from_tree (gsi, result);
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
         here are when we've propagated the address of a decl into the
         object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
         there requires that we create a new CALL_EXPR, and that requires
         copying EH region info to the new node.  Easier to just do it
         here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
          && lang_hooks.fold_obj_type_ref
          && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
          && DECL_P (TREE_OPERAND
                     (OBJ_TYPE_REF_OBJECT (callee), 0)))
        {
          tree t;

          /* ??? Caution: Broken ADDR_EXPR semantics means that
             looking at the type of the operand of the addr_expr
             can yield an array type.  See silly exception in
             check_pointer_types_r.  */
          t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
          t = lang_hooks.fold_obj_type_ref (callee, t);
          if (t)
            {
              gimple_call_set_fn (stmt, t);
              return true;
            }
        }
    }

  return false;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;

  bool changed = false;
  bool inside_addr_expr = false;

  gimple stmt = gsi_stmt (*gsi);

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;
  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree new_rhs = fold_gimple_assign (gsi);
        if (new_rhs != NULL_TREE)
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        stmt = gsi_stmt (*gsi);
        break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;
    case GIMPLE_CALL:
      /* The entire statement may be replaced in this case.  */
      changed |= fold_gimple_call (gsi);
      break;

    default:
      break;
    }

  return changed;
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.  */

bool
fold_stmt_inplace (gimple stmt)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;
  gimple_stmt_iterator si;

  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;
  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.

     It appears that, at one time, maybe_fold_stmt_indirect
     would cause the walk to return non-null in order to
     signal that the entire statement should be replaced with
     a call to _builtin_trap.  This functionality is currently
     disabled, as noted in a FIXME, and cannot be supported here.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops;
        tree new_rhs;
        old_num_ops = gimple_num_ops (stmt);
        si = gsi_for_stmt (stmt);
        new_rhs = fold_gimple_assign (&si);
        if (new_rhs != NULL_TREE
            && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
          {
            gimple_assign_set_rhs_from_tree (&si, new_rhs);
            changed = true;
          }
        gcc_assert (gsi_stmt (si) == stmt);
        break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    default:
      break;
    }

  return changed;
}
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee, rhs;
  gimple stmt, stack_save;
  gimple_stmt_iterator stack_save_gsi;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;
  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        break;
    }

  if (gsi_end_p (i)
      && (! single_succ_p (bb)
          || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;
  stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
  if (gimple_code (stack_save) != GIMPLE_CALL
      || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
      || stmt_could_throw_p (stack_save)
      || !has_single_use (gimple_call_arg (call, 0)))
    return NULL_TREE;

  callee = gimple_call_fndecl (stack_save);
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || gimple_call_num_args (stack_save) != 0)
    return NULL_TREE;

  stack_save_gsi = gsi_for_stmt (stack_save);
  push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
  rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
  if (!update_call_from_tree (&stack_save_gsi, rhs))
    {
      discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
      return NULL_TREE;
    }
  pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
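/* Illustrative example (hypothetical block layout): after inlining a
   function that used alloca or a variable-length array, a block can
   end up as

     p_1 = __builtin_stack_save ();
     ...                                (no calls or asm statements)
     __builtin_stack_restore (p_1);

   with the block falling straight through to the function's exit.
   The restore has no observable effect there, so the routine above
   signals (by returning integer_zero_node) that both calls can be
   eliminated.  */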
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
                             1, integer_zero_node);
      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
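/* Illustrative summary (assuming the target's va_list really is a
   plain character or void pointer):

     __builtin_va_start (&ap, 0);   becomes   ap = __builtin_next_arg (0);
     __builtin_va_copy (&d, s);     becomes   d = s;
     __builtin_va_end (&ap);        is deleted outright.  */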
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */
static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;
  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);
  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }
  if (lhs == NULL_TREE)
    new_stmt = gimple_build_nop ();
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      copy_virtual_operands (new_stmt, stmt);
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
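/* Illustrative use (statement invented for the sketch): if
   ccp_fold_builtin rewrites

     fputs ("hello", f_1);

   as a call tree such as fwrite ("hello", 1, 5, f_1) that cannot be
   installed directly by update_call_from_tree, the caller hands the
   tree to this routine, which gimplifies it into new statements
   inserted before the old call and then replaces the call itself.  */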
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */
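/* With TDF_DETAILS dumping enabled the pass records each change, e.g.
   (illustrative output, SSA names invented):

     Simplified
       t_3 = __builtin_constant_p (n_1);
     to
       t_3 = 0;  */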
static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee, result;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }
          fcode = DECL_FUNCTION_CODE (callee);

          result = ccp_fold_builtin (stmt);

          if (result)
            gimple_remove_stmt_histograms (cfun, stmt);
          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (i);
                if (result)
                  break;
                gsi_next (&i);
                continue;

              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
                result = optimize_stdarg_builtin (stmt);
                if (result)
                  break;
                /* FALLTHRU */

              default:
                gsi_next (&i);
                continue;
              }
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          old_stmt = stmt;
          push_stmt_changes (gsi_stmt_ptr (&i));

          if (!update_call_from_tree (&i, result))
            {
              gimplify_and_update_call_from_tree (&i, result);
              todoflags |= TODO_rebuild_alias;
            }

          stmt = gsi_stmt (i);
          pop_stmt_changes (gsi_stmt_ptr (&i));
          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin, there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }
  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",                                /* name */
  NULL,                                 /* gate */
  execute_fold_all_builtins,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa                   /* todo_flags_finish */
 }
};