/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.
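
   For instance, in a hypothetical fragment

		x_1 = 4;
		y_2 = x_1 + 6;

   simulating the first statement moves x_1 from UNDEFINED to
   CONSTANT 4; simulating the second then moves y_2 to CONSTANT 10.
   A name whose definition cannot be analyzed (say, a value loaded
   from unknown memory) is instead dropped to VARYING.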

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      can be visited again.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.
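
      For example, given the hypothetical fragment

		if (0)
		  goto L1;
		else
		  goto L2;

      only the edge to L2 is marked executable, so statements
      reachable only through L1 are never simulated and PHI arguments
      flowing in from L1 are ignored by the meet operation below.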

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.
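
	For example, in the hypothetical PHI

			a_2 = PHI (a_1(D), 5)

	where a_1(D) is the default definition of an uninitialized
	local, the meet of UNDEFINED and CONSTANT 5 is CONSTANT 5, so
	a_2 is optimistically considered CONSTANT 5.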

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.


   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

	  struct A a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      never_executed ();
	  }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

	  # a_5 = VDEF <a_4>
	  a.a = 2;

	  # VUSE <a_5>
	  x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

	  int A;

	  foo (int i)
	  {
	    if (i_3 > 10)
	      A_4 = 3;
	    # A_5 = PHI (A_4, A_2);

	    # VUSE <A_5>
	    A.0_6 = A;

	    return A.0_6;
	  }

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "toplev.h"
#include "dbgcnt.h"

/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && (TREE_READONLY (sym)
	  || TREE_CODE (sym) == CONST_DECL))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  STRIP_NOPS (val);
	  if (is_gimple_min_invariant (val))
	    {
	      if (TREE_CODE (val) == ADDR_EXPR)
		{
		  tree base = get_base_address (TREE_OPERAND (val, 0));
		  if (base && TREE_CODE (base) == VAR_DECL)
		    {
		      TREE_ADDRESSABLE (base) = 1;
		      if (gimple_referenced_vars (cfun))
			add_referenced_var (base);
		    }
		}
	      return val;
	    }
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && !DECL_EXTERNAL (sym)
	  && targetm.binds_local_p (sym)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
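
/* For instance (a hypothetical declaration):

	static const int forty_two = 42;

   get_symbol_constant_value returns the INTEGER_CST 42 here, and for
   a non-external, locally-binding 'static const int' with no
   initializer at all it returns a zero of the variable's type, per
   the comment above.  */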

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	val.lattice_val = UNDEFINED;
      else
	val.lattice_val = VARYING;
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
    }

  return val;
}
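
/* As a hypothetical illustration of the rules above: an SSA name i_1
   for an uninitialized local 'int i' defaults to UNDEFINED; x_2
   defined by 'x_2 = C', where C is a static variable declared
   'const int C = 7', defaults to CONSTANT 7 by rule 1; and a name
   defined by a GIMPLE_ASM defaults to VARYING by rule 3.  */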

/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = y * 0.0;

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
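
/* As a hypothetical illustration: under -ffast-math
   (HONOR_SIGNED_ZEROS false) a lattice entry holding the REAL_CST
   -0.0 is rewritten to 0.0, so meeting -0.0 and 0.0 from two PHI
   edges yields CONSTANT 0.0 instead of degrading to VARYING; with
   HONOR_NANS false a NaN entry is demoted to UNDEFINED as described
   above.  */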

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}
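
/* The monotonicity that set_lattice_value asserts means a name may
   only move upward in the lattice, e.g. (a hypothetical sequence)

	UNDEFINED  ->  CONSTANT 3  ->  VARYING

   and never from, say, CONSTANT 3 to CONSTANT 4; re-setting the same
   constant is the non-transition case that returns false.  */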

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
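
/* Hypothetical examples of the classification above: 'x_3 = 2 + 3'
   has constant operands and no SSA uses, so it is likely CONSTANT;
   'y_4 = x_3 + 1' with x_3 UNDEFINED is likely UNDEFINED (PLUS_EXPR
   is in the safe list); 'z_5 = x_3 | w_6' with x_3 UNDEFINED falls
   back to VARYING because one VARYING operand can determine a
   bitwise OR completely.  */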

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, then we do not
	     want to avoid simulating the statement once.  Failure
	     to do so means that those edges will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Debug count support.  Reset the values of ssa names
   VARYING when the total number ssa names analyzed is
   beyond the debug count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].value = NULL_TREE;
	}
    }
}

/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;

  do_dbg_cnt ();
  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (const_val, ccp_fold_stmt);

  free (const_val);
  const_val = NULL;
  return something_changed;
}

/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      val1->value = val1->value;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}
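
/* A worked example (hypothetical lattice values): meeting
   CONSTANT 100 with UNDEFINED leaves CONSTANT 100; meeting it with
   CONSTANT 100 from another edge keeps CONSTANT 100; meeting it with
   CONSTANT 3 or with VARYING collapses the result to VARYING.  */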

/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return get_value (rhs)->value;
		}
	      /* Handle propagating invariant addresses into address operations.
		 The folding we do here matches that in tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && may_propagate_address_into_dereference
			       (val->value, *base))
			{
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  tree ret, save = *base;
			  *base = TREE_OPERAND (val->value, 0);
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, list;

		  list = NULL_TREE;
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      if (TREE_CODE (val) == SSA_NAME
			  && get_value (val)->lattice_val == CONSTANT)
			val = get_value (val)->value;
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			list = tree_cons (NULL_TREE, val, list);
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), nreverse (list));
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT)
			return fold_unary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val->value);
		    }
		  else if (TREE_CODE (rhs) == INDIRECT_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && useless_type_conversion_p
			       (TREE_TYPE (rhs),
				TREE_TYPE (TREE_TYPE (val->value))))
			rhs = TREE_OPERAND (val->value, 0);
		    }
		  return fold_const_aggregate_ref (rhs);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = gimple_assign_rhs1 (stmt);

	      /* Simplify the operand down to a constant.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = get_value (op0)->value;
		}

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  /* Do not allow differences in volatile qualification
		     as this might get us confused as to whether a
		     propagation destination statement is volatile
		     or not.  See PR36988.  */
		  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
		      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
		{
		  tree tem;
		  /* Still try to generate a constant of correct type.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
			   (loc,
			    op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = gimple_assign_rhs1 (stmt);
	      tree op1 = gimple_assign_rhs2 (stmt);

	      /* Simplify the operands down to constants when appropriate.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      if (TREE_CODE (op1) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op1);
		  if (val->lattice_val == CONSTANT)
		    op1 = val->value;
		}

	      /* Fold &foo + CST into an invariant reference if possible.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree tem = maybe_fold_offset_to_address
		    (loc, op0, op1, TREE_TYPE (op0));
		  if (tem != NULL_TREE)
		    return tem;
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    case GIMPLE_CALL:
      {
	tree fn = gimple_call_fn (stmt);
	prop_value_t *val;

	if (TREE_CODE (fn) == SSA_NAME)
	  {
	    val = get_value (fn);
	    if (val->lattice_val == CONSTANT)
	      fn = val->value;
	  }
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      {
		args[i] = gimple_call_arg (stmt, i);
		if (TREE_CODE (args[i]) == SSA_NAME)
		  {
		    val = get_value (args[i]);
		    if (val->lattice_val == CONSTANT)
		      args[i] = val->value;
		  }
	      }
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = gimple_cond_lhs (stmt);
	tree op1 = gimple_cond_rhs (stmt);
	enum tree_code code = gimple_cond_code (stmt);

	/* Simplify the operands down to constants when appropriate.  */
	if (TREE_CODE (op0) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op0);
	    if (val->lattice_val == CONSTANT)
	      op0 = val->value;
	  }

	if (TREE_CODE (op1) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op1);
	    if (val->lattice_val == CONSTANT)
	      op1 = val->value;
	  }

	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	tree rhs = gimple_switch_index (stmt);

	if (TREE_CODE (rhs) == SSA_NAME)
	  {
	    /* If the RHS is an SSA_NAME, return its known constant value,
	       if any.  */
	    return get_value (rhs)->value;
	  }

	return rhs;
      }

    default:
      gcc_unreachable ();
    }
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  {
	    STRIP_NOPS (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  {
	    STRIP_NOPS (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case INDIRECT_REF:
      {
	tree base = TREE_OPERAND (t, 0);
	if (TREE_CODE (base) == SSA_NAME
	    && (value = get_value (base))
	    && value->lattice_val == CONSTANT
	    && TREE_CODE (value->value) == ADDR_EXPR
	    && useless_type_conversion_p (TREE_TYPE (t),
					  TREE_TYPE (TREE_TYPE (value->value))))
	  return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
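
/* For instance (a hypothetical example), given

	static const int tbl[3] = { 10, 20, 30 };

   a load 'x_2 = tbl[i_1]' where i_1 has lattice value CONSTANT 2 is
   folded by fold_const_aggregate_ref to the INTEGER_CST 30, and a
   read from a constant string such as "abc"[1] folds to 'b'.  */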

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches. */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}

/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  prop_value_t val;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || TREE_CODE (val.value) != INTEGER_CST)
	  return false;

	if (integer_zerop (val.value))
	  gimple_cond_make_false (stmt);
	else
	  gimple_cond_make_true (stmt);

	return true;
      }

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	prop_value_t *val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_value (lhs))
	    && val->lattice_val == CONSTANT)
	  {
	    tree new_rhs = unshare_expr (val->value);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_value (arg))
		&& val->lattice_val == CONSTANT
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val->value))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val->value));
		changed = true;
	      }
	  }

	return changed;
      }

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	prop_value_t *val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_value (lhs))
	    && val->lattice_val == CONSTANT)
	  {
	    tree rhs = unshare_expr (val->value);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_convert (TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  /* For a simple copy operation, we copy the lattice values.  */
	  prop_value_t *nval = get_value (rhs);
	  val = *nval;
	}
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
      case GIMPLE_ASSIGN:
	/* If the statement is an assignment that produces a single
	   output value, evaluate its RHS to see if the lattice value of
	   its output has changed.  */
	return visit_assignment (stmt, output_p);

      case GIMPLE_CALL:
	/* A value-returning call also performs an assignment.  */
	if (gimple_call_lhs (stmt) != NULL_TREE)
	  return visit_assignment (stmt, output_p);
	break;

      case GIMPLE_COND:
      case GIMPLE_SWITCH:
	/* If STMT is a conditional branch, see if we can determine
	   which branch will be taken.  */
	/* FIXME.  It appears that we should be able to optimize
	   computed GOTOs here as well.  */
	return visit_cond_stmt (stmt, taken_edge_p);

      default:
	break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
 }
};

/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.

   LOC is the location of the original expression.  */

static tree
maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
				tree orig_type,
				bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds. For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should be not folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
	  /* Accesses after the end of arrays of size 0 (gcc
	     extension) and 1 are likely intentional ("struct
	     hack").  */
	  && compare_tree_int (up_bound, 1) > 0)
	return NULL_TREE;
    }
  else if (domain_type
	   && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
	  && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
    }
  else if (!allow_negative_idx
	   && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  {
    tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}
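
/* A hypothetical example of the folding above: for

	int a[10];

   an access at byte offset 12 from 'a' yields the ARRAY_REF a[3] on
   a target with 4-byte 'int', while an offset of 13 is rejected
   because the division by the element size is not exact.  */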

/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.

   LOC is the location of the original expression.  */

static tree
maybe_fold_offset_to_component_ref (location_t loc, tree record_type,
				    tree base, tree offset, tree orig_type)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret;
  tree new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      if (!DECL_FIELD_OFFSET (f))
	continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && useless_type_conversion_p (orig_type, field_type))
	{
	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used as for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      new_base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
      SET_EXPR_LOCATION (new_base, loc);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type,
					    f == TYPE_FIELDS (record_type));
      if (ret)
	return ret;
      ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t,
						orig_type);
      if (ret)
	return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
  SET_EXPR_LOCATION (base, loc);

  t = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type,
				      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (loc, field_type, base, offset,
					     orig_type);
}
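
/* A hypothetical example of the folding above: for

	struct S { int x; int y; } s;

   an access at byte offset 4 from 's' with a 4-byte 'int' matches
   field 'y' exactly and folds to the COMPONENT_REF s.y; an offset
   landing in the middle of a scalar field is skipped.  */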

/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   LOC is the location of original expression.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

tree
maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
				tree orig_type)
{
  tree ret;
  tree type;

  STRIP_NOPS (base);
  if (TREE_CODE (base) != ADDR_EXPR)
    return NULL_TREE;

  base = TREE_OPERAND (base, 0);

  /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
     so it needs to be removed and new COMPONENT_REF constructed.
     The wrong COMPONENT_REF are often constructed by folding the
     (type *)&object within the expression (type *)&object+offset  */
  if (handled_component_p (base))
    {
      HOST_WIDE_INT sub_offset, size, maxsize;
      tree newbase;
      newbase = get_ref_base_and_extent (base, &sub_offset,
					 &size, &maxsize);
      gcc_assert (newbase);
      if (size == maxsize
	  && size != -1
	  && !(sub_offset & (BITS_PER_UNIT - 1)))
	{
	  base = newbase;
	  /* Avoid folding when the inner reference does not start
	     on a byte boundary.  */
	  if (sub_offset)
	    offset = int_const_binop (PLUS_EXPR, offset,
				      build_int_cst (TREE_TYPE (offset),
						     sub_offset / BITS_PER_UNIT), 1);
	}
    }
  if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
      && integer_zerop (offset))
    return base;
  type = TREE_TYPE (base);

  ret = maybe_fold_offset_to_component_ref (loc, type, base, offset, orig_type);
  if (!ret)
    ret = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, true);

  return ret;
}

/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by combination of those.

   LOC is the location of the original expression.

   Before attempting the conversion strip off existing component refs.  */

tree
maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
			      tree orig_type)
{
  tree t;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
	      && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (loc, addr, offset,
				      TREE_TYPE (orig_type));
  if (t != NULL_TREE)
    {
      tree orig = addr;
      tree ptr_type;

      /* For __builtin_object_size to function correctly we need to
	 make sure not to fold address arithmetic so that we change
	 reference from one array to another.  This would happen for
	 example for

	   struct X { char s1[10]; char s2[10] } s;
	   char *foo (void) { return &s.s2[-4]; }

	 where we need to avoid generating &s.s1[6].  As the C and
	 C++ frontends create different initial trees
	 (char *) &s.s1 + -4  vs.  &s.s1[-4]  we have to do some
	 sophisticated comparisons here.  Note that checking for the
	 condition after the fact is easier than trying to avoid doing
	 the folding.  */
      STRIP_NOPS (orig);
      if (TREE_CODE (orig) == ADDR_EXPR)
	orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
	   || (TREE_CODE (orig) == COMPONENT_REF
	       && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
	  && (TREE_CODE (t) == ARRAY_REF
	      || TREE_CODE (t) == COMPONENT_REF)
	  && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
			       ? TREE_OPERAND (orig, 0) : orig,
			       TREE_CODE (t) == ARRAY_REF
			       ? TREE_OPERAND (t, 0) : t, 0))
	return NULL_TREE;

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
	return NULL_TREE;
      return build_fold_addr_expr_with_type_loc (loc, t, ptr_type);
    }

  return NULL_TREE;
}

/* A subroutine of fold_stmt.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
			     int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
	  && is_gimple_min_invariant (DECL_INITIAL (base)))
	return DECL_INITIAL (base);

      /* If there is no offset involved simply return the folded base.  */
      if (integer_zerop (offset))
	return base;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (loc, base_addr, offset,
					  TREE_TYPE (expr));
      if (t)
	{
	  /* Preserve volatileness of the original expression.
	     We can end up with a plain decl here which is shared
	     and we shouldn't mess with its flags.  */
	  if (!SSA_VAR_P (t))
	    TREE_THIS_VOLATILE (t) = volatile_p;
	  return t;
	}
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we die in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_reference (loc, base, offset,
					      TREE_TYPE (expr));
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */
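/* For example (a hypothetical GIMPLE input, assuming a 4-byte int),
   with "int a[10]" the POINTER_PLUS_EXPR

     p_1 = &a p+ 8;

   would be rewritten to

     p_1 = &a[2];

   giving later passes a direct reference to propagate further.  */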
static tree
maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
	 via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (op0, 1))
	  && TREE_CODE (op1) == SSA_NAME
	  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  if (!is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* As we will end up creating a variable index array access
	     in the outermost array dimension make sure there isn't
	     a more inner array that the index could overflow to.  */
	  if (TREE_CODE (TREE_OPERAND (op0, 0)) == ARRAY_REF)
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (TREE_OPERAND (op0, 0))
	      && !handled_component_p (TREE_OPERAND (op0, 0)))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
				     TYPE_SIZE_UNIT (TREE_TYPE (op0))))
	    return build_fold_addr_expr
		   (build4 (ARRAY_REF, TREE_TYPE (op0),
			    TREE_OPERAND (op0, 0),
			    gimple_assign_rhs1 (offset_def),
			    TREE_OPERAND (op0, 2),
			    TREE_OPERAND (op0, 3)));
	  else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
		   (build4 (ARRAY_REF, TREE_TYPE (op0),
			    TREE_OPERAND (op0, 0),
			    op1,
			    TREE_OPERAND (op0, 2),
			    TREE_OPERAND (op0, 3)));
	}
      return NULL_TREE;
    }
  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1,
					    ptd_type);
  if (t)
    {
      t = build1 (ADDR_EXPR, res_type, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
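/* For example, after an address has been propagated, a statement such
   as "tmp_1 = *&x" simplifies to "tmp_1 = x", and on the lhs
   "*&x = 0" becomes "x = 0" (a sketch; the actual operands are
   INDIRECT_REF trees wrapping ADDR_EXPRs).  */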
static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;

  if (TREE_CODE (expr) == ARRAY_REF
      && !is_lhs)
    {
      tree tem = fold_read_from_constant_string (expr);
      if (tem)
	return tem;
    }

  /* ??? We might want to open-code the relevant remaining cases
     to avoid using the generic fold.  */
  if (handled_component_p (*t)
      && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
    {
      tree tem = fold (*t);
      if (tem != *t)
	return tem;
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  if (TREE_CODE (*t) == INDIRECT_REF)
    {
      tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
					   integer_zero_node);
      /* Avoid folding *"abc" = 5 into 'a' = 5.  */
      if (is_lhs && tem && CONSTANT_CLASS_P (tem))
	tem = NULL_TREE;
      if (!tem
	  && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
	/* If we had a good reason for propagating the address here,
	   make sure we end up with valid gimple.  See PR34989.  */
	tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }
  else if (!is_lhs
	   && DECL_P (*t))
    {
      tree tem = get_symbol_constant_value (*t);
      if (tem
	  && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
	{
	  *t = unshare_expr (tem);
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
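/* As an illustrative sketch, for

     s_1 = PHI <"foo", "quux">

   a query with TYPE == 0 fails (the lengths 3 and 4 differ), while
   TYPE == 1 succeeds and returns the maximum string length 4.  */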
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
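/* For example, when get_maxval_strlen below proves that the source of
   a strcpy call always has length 2 (say, the argument is the literal
   "hi"), the call

     strcpy (dst, src);

   may be replaced by memcpy (dst, src, 3), copying the terminating
   NUL as well.  A sketch of the intent; the exact replacement is
   chosen by fold_builtin_strcpy.  */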
static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	    fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we can not use the result.  */
	  if (is_gimple_val (new_val)
	      || (is_gimple_cast (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */
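/* For instance (a sketch), after CCP substitutes the constant 3 for
   x_2 in "y_1 = x_2 + 1", this function folds the rhs to 4; the
   caller then rewrites the assignment to "y_1 = 4".  */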
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try to fold a conditional expression.  */
	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    tree op0 = COND_EXPR_COND (rhs);
	    tree tem;
	    bool set = false;
	    location_t cond_loc = EXPR_LOCATION (rhs);

	    if (COMPARISON_CLASS_P (op0))
	      {
		fold_defer_overflow_warnings ();
		tem = fold_binary_loc (cond_loc,
				       TREE_CODE (op0), TREE_TYPE (op0),
				       TREE_OPERAND (op0, 0),
				       TREE_OPERAND (op0, 1));
		/* This is actually a conditional expression, not a GIMPLE
		   conditional statement, however, the valid_gimple_rhs_p
		   test still applies.  */
		set = (tem && is_gimple_condexpr (tem)
		       && valid_gimple_rhs_p (tem));
		fold_undefer_overflow_warnings (set, stmt, 0);
	      }
	    else if (is_gimple_min_invariant (op0))
	      {
		tem = op0;
		set = true;
	      }
	    else
	      return NULL_TREE;

	    if (set)
	      result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
					COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
	  }

	else if (TREE_CODE (rhs) == TARGET_MEM_REF)
	  return maybe_fold_tmr (rhs);

	else if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
	    if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return unshare_expr (get_symbol_constant_value (rhs));

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
	else if (CONVERT_EXPR_CODE_P (subcode)
		 && POINTER_TYPE_P (gimple_expr_type (stmt))
		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	  {
	    tree type = gimple_expr_type (stmt);
	    tree t = maybe_fold_offset_to_address (loc,
						   gimple_assign_rhs1 (stmt),
						   integer_zero_node, type);
	    if (t)
	      return t;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
	    {
	      type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
	      if (!useless_type_conversion_p
		    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
		type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	    }
	  result = maybe_fold_stmt_addition (gimple_location (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     gimple_assign_rhs2 (stmt));
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt));
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */
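/* For instance, once CCP has substituted constants, a condition such
   as "if (5 > 2)" folds to boolean true and the GIMPLE_COND is
   canonicalized accordingly; the now-dead outgoing edge is left for
   the CFG cleanup to remove (a sketch of the intended use).  */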
static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
static void gimplify_and_update_call_from_tree (gimple_stmt_iterator *, tree);
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */
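/* A sketch of the second case handled below: when earlier propagation
   has turned the object of a C++ virtual call into the address of a
   variable whose static type is known, lang_hooks.fold_obj_type_ref
   can resolve the OBJ_TYPE_REF to a concrete method, turning the
   indirect call into a direct one.  */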
static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  return true;
	}
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
	 here are when we've propagated the address of a decl into the
	 object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
	 there requires that we create a new CALL_EXPR, and that requires
	 copying EH region info to the new node.  Easier to just do it
	 here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
	  && lang_hooks.fold_obj_type_ref
	  && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND
		     (OBJ_TYPE_REF_OBJECT (callee), 0)))
	{
	  tree t;

	  /* ??? Caution: Broken ADDR_EXPR semantics means that
	     looking at the type of the operand of the addr_expr
	     can yield an array type.  See silly exception in
	     check_pointer_types_r.  */
	  t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
	  t = lang_hooks.fold_obj_type_ref (callee, t);
	  if (t)
	    {
	      gimple_call_set_fn (stmt, t);
	      return true;
	    }
	}
    }

  return false;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */
static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	tree lhs = gimple_assign_lhs (stmt);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
	  {
	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	    if (tmp)
	      {
		gimple_call_set_arg (stmt, i, tmp);
		changed = true;
	      }
	  }
      /* The entire statement may be replaced in this case.  */
      if (!inplace)
	changed |= fold_gimple_call (gsi);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	{
	  tree link = gimple_asm_output_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	{
	  tree link = gimple_asm_input_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */
bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */
bool
fold_stmt_inplace (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  gcc_assert (gsi_stmt (gsi) == stmt);
  return changed;
}
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */
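/* For example (a hypothetical input),

     void foo (int n)
     {
       { int vla1[n]; use (vla1); }
       { int vla2[n]; use (vla2); }
     }

   gimplifies to a __builtin_stack_save/__builtin_stack_restore pair
   around each block; the first restore is redundant because the second
   follows it with no intervening calls, and the last one is redundant
   when the function returns immediately afterwards.  A sketch only.  */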
static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }
 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */
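/* For instance, on a target whose va_list is a plain char pointer,

     __builtin_va_start (&ap, 0);

   becomes the assignment "ap = __builtin_next_arg (0)", and
   __builtin_va_copy (&d, s) becomes the plain copy "d = s", while
   __builtin_va_end (&ap) folds away entirely (a sketch; the guards
   below check the target hooks first).  */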
static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
				 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */
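/* For example, if ccp_fold_builtin rewrites "lhs_1 = strcpy (dst, src)"
   into a tree that is not a valid GIMPLE rhs (a hypothetical
   COMPOUND_EXPR such as "memcpy (dst, src, 4), dst"), the expression
   is gimplified into a short statement sequence inserted before the
   call, and the call itself is replaced by an assignment from the
   resulting temporary.  A sketch; update_call_from_tree handles the
   simple cases directly.  */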
static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc();
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }

  if (lhs == NULL_TREE)
    {
      new_stmt = gimple_build_nop ();
      unlink_stmt_vdef (stmt);
      release_defs (stmt);
    }
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */
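/* For example, code such as

     if (__builtin_constant_p (n))
       ...

   where CCP never proved n constant is resolved here by folding the
   call to 0, so the guarded block becomes dead and is removed by the
   later CFG cleanup (a sketch of the BUILT_IN_CONSTANT_P case in the
   switch below).  */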
static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = ccp_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_update_address_taken;
	    }

	  stmt = gsi_stmt (i);
	  update_stmt (stmt);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};