/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
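
   For illustration (a made-up fragment, not from any test case), given

	x_1 = 4;
	y_2 = x_1 + 5;
	if (y_2 > 8)
	  ...

   the simulation marks x_1 CONSTANT 4, folds the second statement to
   y_2 = 9 and evaluates the predicate to true, so only the edge for
   the taken branch is marked executable and the other arm is never
   simulated.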
   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

	  struct A a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      never_executed ();
	  }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

	  # a_5 = VDEF <a_4>
	  a.a = 2;

	  # VUSE <a_5>
	  x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;
	  # A_5 = PHI (A_4, A_2);

	  # VUSE <A_5>
	  A.0_6 = A;

	  return A.0_6;
	}

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "dbgcnt.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && (TREE_READONLY (sym)
	  || TREE_CODE (sym) == CONST_DECL))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  STRIP_NOPS (val);
	  if (is_gimple_min_invariant (val))
	    {
	      if (TREE_CODE (val) == ADDR_EXPR)
		{
		  tree base = get_base_address (TREE_OPERAND (val, 0));
		  if (base && TREE_CODE (base) == VAR_DECL)
		    {
		      TREE_ADDRESSABLE (base) = 1;
		      if (gimple_referenced_vars (cfun))
			add_referenced_var (base);
		    }
		}
	      return val;
	    }
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && !DECL_EXTERNAL (sym)
	  && targetm.binds_local_p (sym)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	val.lattice_val = UNDEFINED;
      else
	val.lattice_val = VARYING;
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

   x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */
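
/* For illustration (a made-up case): when HONOR_SIGNED_ZEROS is false
   for the mode of 'float', a lattice cell holding CONSTANT -0.0f is
   rewritten below to CONSTANT 0.0f, so that it meets CONSTANT 0.0f
   coming from another path without degrading to VARYING.  */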
static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
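
/* For illustration (a made-up case): for z_3 = x_1 + y_2 with both
   operands UNDEFINED the result is UNDEFINED; if at least one operand
   is CONSTANT (say x_1 = 3) and none is UNDEFINED, the result is
   CONSTANT, which makes evaluate_stmt attempt a fold even if y_2
   turns out VARYING.  */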
static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call that returns no value, or a direct call to a
     function that is not a builtin, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, then we do not
	     want to avoid simulating the statement once.  Failure
	     to do so means that those edges will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}
/* Debug count support.  Reset the values of ssa names to VARYING when
   the total number of ssa names analyzed is beyond the debug count
   specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].value = NULL_TREE;
	}
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;

  do_dbg_cnt ();
  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (const_val, ccp_fold_stmt);

  free (const_val);
  const_val = NULL;
  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */
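
/* For illustration (made-up values):

		CONSTANT 3  M  UNDEFINED   = CONSTANT 3
		CONSTANT 3  M  CONSTANT 3  = CONSTANT 3
		CONSTANT 3  M  CONSTANT 4  = VARYING
		CONSTANT 3  M  VARYING     = VARYING  */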
static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      val1->value = val1->value;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
	      "\n    Argument #%d (%d -> %d %sexecutable)\n",
	      i, e->src->index, e->dest->index,
	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */
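
/* For illustration (a made-up case): given ptr_1 = &x and a use
   *ptr_1, propagating the address turns the dereference into a plain
   use of 'x'; the checks below reject the transformation when the
   types or volatile qualifiers of '*ptr_1' and 'x' do not agree.  */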
bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */
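
/* For illustration (a made-up case): for y_2 = x_1 * 2 with x_1 known
   CONSTANT 3, ccp_fold substitutes the lattice value for x_1 and
   returns the folded constant 6.  */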
static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return get_value (rhs)->value;
		}
	      /* Handle propagating invariant addresses into address operations.
		 The folding we do here matches that in tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && may_propagate_address_into_dereference
			       (val->value, *base))
			{
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  tree ret, save = *base;
			  *base = TREE_OPERAND (val->value, 0);
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, list;

		  list = NULL_TREE;
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      if (TREE_CODE (val) == SSA_NAME
			  && get_value (val)->lattice_val == CONSTANT)
			val = get_value (val)->value;
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			list = tree_cons (NULL_TREE, val, list);
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), nreverse (list));
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT)
			return fold_unary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val->value);
		    }
		  else if (TREE_CODE (rhs) == INDIRECT_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && useless_type_conversion_p
			       (TREE_TYPE (rhs),
				TREE_TYPE (TREE_TYPE (val->value))))
			rhs = TREE_OPERAND (val->value, 0);
		    }
		  return fold_const_aggregate_ref (rhs);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = gimple_assign_rhs1 (stmt);

	      /* Simplify the operand down to a constant.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = get_value (op0)->value;
		}

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  /* Do not allow differences in volatile qualification
		     as this might get us confused as to whether a
		     propagation destination statement is volatile
		     or not.  See PR36988.  */
		  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
		      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
		{
		  tree tem;
		  /* Still try to generate a constant of correct type.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
			   (loc,
			    op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = gimple_assign_rhs1 (stmt);
	      tree op1 = gimple_assign_rhs2 (stmt);

	      /* Simplify the operands down to constants when appropriate.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      if (TREE_CODE (op1) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op1);
		  if (val->lattice_val == CONSTANT)
		    op1 = val->value;
		}

	      /* Fold &foo + CST into an invariant reference if possible.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree tem = maybe_fold_offset_to_address
			       (loc, op0, op1, TREE_TYPE (op0));
		  if (tem != NULL_TREE)
		    return tem;
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn = gimple_call_fn (stmt);
	prop_value_t *val;

	if (TREE_CODE (fn) == SSA_NAME)
	  {
	    val = get_value (fn);
	    if (val->lattice_val == CONSTANT)
	      fn = val->value;
	  }
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      {
		args[i] = gimple_call_arg (stmt, i);
		if (TREE_CODE (args[i]) == SSA_NAME)
		  {
		    val = get_value (args[i]);
		    if (val->lattice_val == CONSTANT)
		      args[i] = val->value;
		  }
	      }
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = gimple_cond_lhs (stmt);
	tree op1 = gimple_cond_rhs (stmt);
	enum tree_code code = gimple_cond_code (stmt);

	/* Simplify the operands down to constants when appropriate.  */
	if (TREE_CODE (op0) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op0);
	    if (val->lattice_val == CONSTANT)
	      op0 = val->value;
	  }

	if (TREE_CODE (op1) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op1);
	    if (val->lattice_val == CONSTANT)
	      op1 = val->value;
	  }

	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	tree rhs = gimple_switch_index (stmt);

	if (TREE_CODE (rhs) == SSA_NAME)
	  {
	    /* If the RHS is an SSA_NAME, return its known constant value,
	       if any.  */
	    return get_value (rhs)->value;
	  }

	return rhs;
      }

    default:
      gcc_unreachable ();
    }
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */
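
/* For illustration (a made-up case): given

	static const int a[2] = { 1, 2 };

   a load from a[1] is resolved below to the INTEGER_CST 2 by looking
   the index up in the CONSTRUCTOR of DECL_INITIAL.  */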
tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  {
	    STRIP_NOPS (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  {
	    STRIP_NOPS (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case INDIRECT_REF:
      {
	tree base = TREE_OPERAND (t, 0);
	if (TREE_CODE (base) == SSA_NAME
	    && (value = get_value (base))
	    && value->lattice_val == CONSTANT
	    && TREE_CODE (value->value) == ADDR_EXPR
	    && useless_type_conversion_p (TREE_TYPE (t),
					  TREE_TYPE (TREE_TYPE (value->value))))
	  return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:
	  break;
	}
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}
/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */
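
/* For illustration (a made-up case): a GIMPLE_COND 'if (x_1 != 0)'
   whose operand has lattice value CONSTANT 0 is rewritten below with
   gimple_cond_make_false, so CFG cleanup can delete the dead arm.  */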
static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || TREE_CODE (val.value) != INTEGER_CST)
	  return false;

	if (integer_zerop (val.value))
	  gimple_cond_make_false (stmt);
	else
	  gimple_cond_make_true (stmt);

	return true;
      }

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	prop_value_t *val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_value (lhs))
	    && val->lattice_val == CONSTANT)
	  {
	    tree new_rhs = unshare_expr (val->value);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_value (arg))
		&& val->lattice_val == CONSTANT
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val->value))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val->value));
		changed = true;
	      }
	  }

	return changed;
      }

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	prop_value_t *val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_value (lhs))
	    && val->lattice_val == CONSTANT)
	  {
	    tree rhs = unshare_expr (val->value);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_convert (TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  /* For a simple copy operation, we copy the lattice values.  */
	  prop_value_t *nval = get_value (rhs);
	  val = *nval;
	}
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
      case GIMPLE_ASSIGN:
	/* If the statement is an assignment that produces a single
	   output value, evaluate its RHS to see if the lattice value of
	   its output has changed.  */
	return visit_assignment (stmt, output_p);

      case GIMPLE_CALL:
	/* A value-returning call also performs an assignment.  */
	if (gimple_call_lhs (stmt) != NULL_TREE)
	  return visit_assignment (stmt, output_p);
	break;

      case GIMPLE_COND:
      case GIMPLE_SWITCH:
	/* If STMT is a conditional branch, see if we can determine
	   which branch will be taken.  */
	/* FIXME.  It appears that we should be able to optimize
	   computed GOTOs here as well.  */
	return visit_cond_stmt (stmt, taken_edge_p);

      default:
	break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
 }
};
/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.

   LOC is the location of the original expression.  */
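
/* For illustration (a made-up case): with 'int a[10];' and 4-byte
   ints, *(&a[0] + 8) folds to a[2]; with a byte displacement of 6 the
   division by the element size is inexact and nothing is done.  */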
static tree
maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
				tree orig_type,
				bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds. For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should be not folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
	  /* Accesses after the end of arrays of size 0 (gcc
	     extension) and 1 are likely intentional ("struct
	     hack").  */
	  && compare_tree_int (up_bound, 1) > 0)
	return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
	  && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
    }
  else if (!allow_negative_idx
	   && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  {
    tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}
/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.

   LOC is the location of the original expression.  */
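
/* For illustration (a made-up case): with

	struct S { int a; int b; } s;

   and 4-byte ints, *(int *)((char *)&s + 4) folds to s.b.  */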
static tree
maybe_fold_offset_to_component_ref (location_t loc, tree record_type,
				    tree base, tree offset, tree orig_type)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret;
  tree new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      if (!DECL_FIELD_OFFSET (f))
	continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && useless_type_conversion_p (orig_type, field_type))
	{
	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used as for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      new_base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
      SET_EXPR_LOCATION (new_base, loc);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type,
					    f == TYPE_FIELDS (record_type));
      if (ret)
	return ret;
      ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t,
						orig_type);
      if (ret)
	return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
  SET_EXPR_LOCATION (base, loc);

  t = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type,
				      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (loc, field_type, base, offset,
					     orig_type);
}
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   LOC is the location of the original expression.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */
static tree
maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
				tree orig_type)
{
  tree ret;
  tree type;

  STRIP_NOPS (base);
  if (TREE_CODE (base) != ADDR_EXPR)
    return NULL_TREE;

  base = TREE_OPERAND (base, 0);

  /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
     so it needs to be removed and new COMPONENT_REF constructed.
     The wrong COMPONENT_REF are often constructed by folding the
     (type *)&object within the expression (type *)&object+offset  */
  if (handled_component_p (base))
    {
      HOST_WIDE_INT sub_offset, size, maxsize;
      tree newbase;
      newbase = get_ref_base_and_extent (base, &sub_offset,
					 &size, &maxsize);
      gcc_assert (newbase);
      if (size == maxsize
	  && size != -1
	  && !(sub_offset & (BITS_PER_UNIT - 1)))
	{
	  base = newbase;
	  /* Could handle the sub_offset as well.  */
	  offset = int_const_binop (PLUS_EXPR, offset,
				    build_int_cst (TREE_TYPE (offset),
						   sub_offset / BITS_PER_UNIT),
				    1);
	}
    }
  if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
      && integer_zerop (offset))
    return base;
  type = TREE_TYPE (base);

  ret = maybe_fold_offset_to_component_ref (loc, type, base, offset, orig_type);
  if (!ret)
    ret = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, true);

  return ret;
}
/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by combination of those.

   LOC is the location of the original expression.

   Before attempting the conversion strip off existing component refs.  */
tree
maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
			      tree orig_type)
{
  tree t;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
	      && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (loc, addr, offset,
				      TREE_TYPE (orig_type));
  if (t != NULL_TREE)
    {
      tree orig = addr;
      tree ptr_type;

      /* For __builtin_object_size to function correctly we need to
	 make sure not to fold address arithmetic so that we change
	 reference from one array to another.  This would happen for
	 example for

	   struct X { char s1[10]; char s2[10] } s;
	   char *foo (void) { return &s.s2[-4]; }

	 where we need to avoid generating &s.s1[6].  As the C and
	 C++ frontends create different initial trees
	 (char *) &s.s1 + -4  vs.  &s.s1[-4]  we have to do some
	 sophisticated comparisons here.  Note that checking for the
	 condition after the fact is easier than trying to avoid doing
	 the folding.  */
      STRIP_NOPS (orig);
      if (TREE_CODE (orig) == ADDR_EXPR)
	orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
	   || (TREE_CODE (orig) == COMPONENT_REF
	       && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
	  && (TREE_CODE (t) == ARRAY_REF
	      || TREE_CODE (t) == COMPONENT_REF)
	  && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
			       ? TREE_OPERAND (orig, 0) : orig,
			       TREE_CODE (t) == ARRAY_REF
			       ? TREE_OPERAND (t, 0) : t, 0))
	return NULL_TREE;

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
	return NULL_TREE;
      return build_fold_addr_expr_with_type_loc (loc, t, ptr_type);
    }

  return NULL_TREE;
}
/* A subroutine of fold_stmt.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */
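
/* For illustration (a made-up case): for 'static const int t[4];',
   the load *(&t[0] + 8) can be simplified to t[2] below, and a
   dereference of a CONST_DECL folds directly to its DECL_INITIAL.  */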
2157 maybe_fold_stmt_indirect (tree expr
, tree base
, tree offset
)
2160 bool volatile_p
= TREE_THIS_VOLATILE (expr
);
2161 location_t loc
= EXPR_LOCATION (expr
);
2163 /* We may well have constructed a double-nested PLUS_EXPR via multiple
2164 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
2165 are sometimes added. */
2167 STRIP_TYPE_NOPS (base
);
2168 TREE_OPERAND (expr
, 0) = base
;
2170 /* One possibility is that the address reduces to a string constant. */
2171 t
= fold_read_from_constant_string (expr
);
2175 /* Add in any offset from a POINTER_PLUS_EXPR. */
2176 if (TREE_CODE (base
) == POINTER_PLUS_EXPR
)
2180 offset2
= TREE_OPERAND (base
, 1);
2181 if (TREE_CODE (offset2
) != INTEGER_CST
)
2183 base
= TREE_OPERAND (base
, 0);
2185 offset
= fold_convert (sizetype
,
2186 int_const_binop (PLUS_EXPR
, offset
, offset2
, 1));
2189 if (TREE_CODE (base
) == ADDR_EXPR
)
2191 tree base_addr
= base
;
2193 /* Strip the ADDR_EXPR. */
2194 base
= TREE_OPERAND (base
, 0);
2196 /* Fold away CONST_DECL to its value, if the type is scalar. */
2197 if (TREE_CODE (base
) == CONST_DECL
2198 && is_gimple_min_invariant (DECL_INITIAL (base
)))
2199 return DECL_INITIAL (base
);
2201 /* If there is no offset involved simply return the folded base. */
2202 if (integer_zerop (offset
))
2205 /* Try folding *(&B+O) to B.X. */
2206 t
= maybe_fold_offset_to_reference (loc
, base_addr
, offset
,
2210 /* Preserve volatileness of the original expression.
2211 We can end up with a plain decl here which is shared
2212 and we shouldn't mess with its flags. */
2214 TREE_THIS_VOLATILE (t
) = volatile_p
;
2220 /* We can get here for out-of-range string constant accesses,
2221 such as "_"[3]. Bail out of the entire substitution search
2222 and arrange for the entire statement to be replaced by a
2223 call to __builtin_trap. In all likelihood this will all be
2224 constant-folded away, but in the meantime we can't leave with
2225 something that get_expr_operands can't understand. */
2229 if (TREE_CODE (t
) == ADDR_EXPR
2230 && TREE_CODE (TREE_OPERAND (t
, 0)) == STRING_CST
)
2232 /* FIXME: Except that this causes problems elsewhere with dead
2233 code not being deleted, and we die in the rtl expanders
2234 because we failed to remove some ssa_name. In the meantime,
2235 just return zero. */
2236 /* FIXME2: This condition should be signaled by
2237 fold_read_from_constant_string directly, rather than
2238 re-checking for it here. */
2239 return integer_zero_node
;
2242 /* Try folding *(B+O) to B->X. Still an improvement. */
2243 if (POINTER_TYPE_P (TREE_TYPE (base
)))
2245 t
= maybe_fold_offset_to_reference (loc
, base
, offset
,
2252 /* Otherwise we had an offset that we could not simplify. */
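/* As an illustrative sketch of the cases handled above, assuming a
   4-byte int:

     struct S { int x; int y; } s, *p_1;

     ... = *((int *) &s + 4);    becomes   ... = s.y;     (*(&B+O) -> B.X)
     ... = *((int *) p_1 + 4);   becomes   ... = p_1->y;  (*(B+O) -> B->X)

   and a dereferenced CONST_DECL with a scalar invariant initializer is
   replaced by the initializer itself.  */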
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
        (T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
        &array[x]
   which may be able to propagate further.  */

tree
maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
         via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (op0, 1))
          && TREE_CODE (op1) == SSA_NAME
          && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
        {
          gimple offset_def = SSA_NAME_DEF_STMT (op1);
          if (!is_gimple_assign (offset_def))
            return NULL_TREE;

          if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
              && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
              && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
                                     TYPE_SIZE_UNIT (TREE_TYPE (op0))))
            return build_fold_addr_expr
                     (build4 (ARRAY_REF, TREE_TYPE (op0),
                              TREE_OPERAND (op0, 0),
                              gimple_assign_rhs1 (offset_def),
                              TREE_OPERAND (op0, 2),
                              TREE_OPERAND (op0, 3)));
          else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
                   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
            return build_fold_addr_expr
                     (build4 (ARRAY_REF, TREE_TYPE (op0),
                              TREE_OPERAND (op0, 0),
                              op1,
                              TREE_OPERAND (op0, 2),
                              TREE_OPERAND (op0, 3)));
        }
      return NULL_TREE;
    }

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
        break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
        break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
        {
          min_idx = TYPE_MIN_VALUE (min_idx);
          if (min_idx)
            {
              if (TREE_CODE (min_idx) != INTEGER_CST)
                break;

              array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
              if (!integer_zerop (min_idx))
                array_idx = int_const_binop (MINUS_EXPR, array_idx,
                                             min_idx, 0);
            }
        }

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
                             array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1,
                                            ptd_type);
  if (t)
    {
      t = build1 (ADDR_EXPR, res_type, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
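/* For example, given "int a[10]" and assuming a 4-byte int, the
   POINTER_PLUS_EXPR

     p_1 = (int *) &a + 4;

   is rewritten here as p_1 = &a[1], which later substitutions may be
   able to propagate further.  */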
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;

  if (TREE_CODE (expr) == ARRAY_REF
      && !is_lhs)
    {
      tree tem = fold_read_from_constant_string (expr);
      if (tem)
        return tem;
    }

  /* ??? We might want to open-code the relevant remaining cases
     to avoid using the generic fold.  */
  if (handled_component_p (*t)
      && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
    {
      tree tem = fold (*t);
      if (tem != *t)
        return tem;
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  if (TREE_CODE (*t) == INDIRECT_REF)
    {
      tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
                                           integer_zero_node);
      /* Avoid folding *"abc" = 5 into 'a' = 5.  */
      if (is_lhs && tem && CONSTANT_CLASS_P (tem))
        tem = NULL_TREE;
      if (!tem
          && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
        /* If we had a good reason for propagating the address here,
           make sure we end up with valid gimple.  See PR34989.  */
        tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      if (tem)
        {
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }
  else if (!is_lhs
           && DECL_P (*t))
    {
      tree tem = get_symbol_constant_value (*t);
      if (tem
          && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
        {
          *t = unshare_expr (tem);
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  return NULL_TREE;
}
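/* The canonical input here is a dereference left over from address
   propagation, e.g. when p_1 = &x has been propagated into *p_1 so
   that the statement reads *&x; the INDIRECT_REF case above folds this
   back to plain x, then recurses in case enclosing component
   references can now be simplified as well.  */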
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      return false;
    }
}
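/* For example, with TYPE == 0 the walk above determines a length of 3
   for

     s_1 = "foo";            s_2 = "bar";
     s_3 = PHI <s_1, s_2>;

   because both PHI arguments have the same constant strlen, whereas it
   returns false if the arguments had different constant lengths.  */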
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we can not use the result.  */
          if (is_gimple_val (new_val)
              || (is_gimple_cast (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (loc, callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (loc, callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
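/* A typical simplification made here once the source length is known
   is

     strcpy (dst_1, "hello");   =>   memcpy (dst_1, "hello", 6);

   via fold_builtin_strcpy.  Note the result is another call rather
   than a constant, which is why callers may need to re-gimplify it.  */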
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
          {
            tree op0 = COND_EXPR_COND (rhs);
            tree tem;
            bool set = false;
            location_t cond_loc = EXPR_LOCATION (rhs);

            if (COMPARISON_CLASS_P (op0))
              {
                fold_defer_overflow_warnings ();
                tem = fold_binary_loc (cond_loc,
                                       TREE_CODE (op0), TREE_TYPE (op0),
                                       TREE_OPERAND (op0, 0),
                                       TREE_OPERAND (op0, 1));
                /* This is actually a conditional expression, not a GIMPLE
                   conditional statement, however, the valid_gimple_rhs_p
                   test still applies.  */
                set = (tem && is_gimple_condexpr (tem)
                       && valid_gimple_rhs_p (tem));
                fold_undefer_overflow_warnings (set, stmt, 0);
              }
            else if (is_gimple_min_invariant (op0))
              {
                tem = op0;
                set = true;
              }
            else
              return NULL_TREE;

            if (set)
              result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
                                        COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
          }

        else if (TREE_CODE (rhs) == TARGET_MEM_REF)
          return maybe_fold_tmr (rhs);

        else if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
            if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return unshare_expr (get_symbol_constant_value (rhs));

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
        else if (CONVERT_EXPR_CODE_P (subcode)
                 && POINTER_TYPE_P (gimple_expr_type (stmt))
                 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
          {
            tree type = gimple_expr_type (stmt);
            tree t = maybe_fold_offset_to_address (loc,
                                                   gimple_assign_rhs1 (stmt),
                                                   integer_zero_node, type);
            if (t)
              return t;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
        {
          tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
            {
              type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
              if (!useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
                type = TREE_TYPE (gimple_assign_rhs1 (stmt));
            }
          result = maybe_fold_stmt_addition (gimple_location (stmt),
                                             type,
                                             gimple_assign_rhs1 (stmt),
                                             gimple_assign_rhs2 (stmt));
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;

          /* Fold might have produced non-GIMPLE, so if we trust it blindly
             we lose canonicalization opportunities.  Do not go again
             through fold here though, or the same non-GIMPLE will be
             produced.  */
          if (commutative_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
                           gimple_assign_rhs2 (stmt),
                           gimple_assign_rhs1 (stmt));
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
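/* For instance, a GIMPLE_SINGLE_RHS of the form x_1 = 1 < 2 ? a_2 : b_3
   gets its predicate folded to 1 so the COND_EXPR collapses to a_2,
   and a GIMPLE_BINARY_RHS such as p_1 = &a p+ 4 is routed through
   maybe_fold_stmt_addition to become p_1 = &a[1] (assuming 4-byte
   array elements).  */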
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
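/* E.g. once propagation has produced the condition "if (2 > 1)",
   fold_binary_loc reduces it to boolean_true_node and the statement
   is rewritten as "if (1)", leaving the dead arm to later CFG
   cleanup.  */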
static void gimplify_and_update_call_from_tree (gimple_stmt_iterator *, tree);
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
        {
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          return true;
        }
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
         here are when we've propagated the address of a decl into the
         object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
         there requires that we create a new CALL_EXPR, and that requires
         copying EH region info to the new node.  Easier to just do it
         here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
          && lang_hooks.fold_obj_type_ref
          && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
          && DECL_P (TREE_OPERAND
                     (OBJ_TYPE_REF_OBJECT (callee), 0)))
        {
          tree t;

          /* ??? Caution: Broken ADDR_EXPR semantics means that
             looking at the type of the operand of the addr_expr
             can yield an array type.  See silly exception in
             check_pointer_types_r.  */
          t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
          t = lang_hooks.fold_obj_type_ref (callee, t);
          if (t)
            {
              gimple_call_set_fn (stmt, t);
              return true;
            }
        }
    }

  return false;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops = gimple_num_ops (stmt);
        tree new_rhs = fold_gimple_assign (gsi);
        tree lhs = gimple_assign_lhs (stmt);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
        if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
          {
            tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
            if (tmp)
              {
                gimple_call_set_arg (stmt, i, tmp);
                changed = true;
              }
          }
      /* The entire statement may be replaced in this case.  */
      if (!inplace)
        changed |= fold_gimple_call (gsi);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
        {
          tree link = gimple_asm_output_op (stmt, i);
          tree op = TREE_VALUE (link);
          if (REFERENCE_CLASS_P (op)
              && (op = maybe_fold_reference (op, true)) != NULL_TREE)
            {
              TREE_VALUE (link) = op;
              changed = true;
            }
        }
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
        {
          tree link = gimple_asm_input_op (stmt, i);
          tree op = TREE_VALUE (link);
          if (REFERENCE_CLASS_P (op)
              && (op = maybe_fold_reference (op, false)) != NULL_TREE)
            {
              TREE_VALUE (link) = op;
              changed = true;
            }
        }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  gcc_assert (gsi_stmt (gsi) == stmt);
  return changed;
}
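/* A typical use from a propagation pass (a sketch, not a specific
   caller):

     gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     if (fold_stmt (&gsi))
       update_stmt (gsi_stmt (gsi));

   fold_stmt_inplace is the variant to use when the caller holds on to
   STMT and must not have it replaced by a different statement.  */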
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
          || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
          /* All regular builtins are ok, just obviously not alloca.  */
          || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
        return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }
 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
        {
          callee = gimple_call_fndecl (stack_save);
          if (callee
              && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
            {
              gimple_stmt_iterator stack_save_gsi;
              tree rhs;

              stack_save_gsi = gsi_for_stmt (stack_save);
              rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
              update_call_from_tree (&stack_save_gsi, rhs);
            }
        }
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
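/* The pattern being removed typically stems from an inlined function
   that used a variable-length array or alloca:

     save_1 = __builtin_stack_save ();
     ...                                  <- no calls or asms
     __builtin_stack_restore (save_1);

   where the restore (and, when SAVE_1 has a single use, the save as
   well) is redundant.  */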
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
                                 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
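/* On a target whose va_list is a plain character pointer this yields

     __builtin_va_start (&ap, 0);   =>   ap = __builtin_next_arg (0);
     __builtin_va_copy (&d, s);     =>   d = s;
     __builtin_va_end (&ap);        =>   (statement deleted)

   as described in the comment above.  */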
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */

static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc();
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }

  if (lhs == NULL_TREE)
    {
      new_stmt = gimple_build_nop ();
      unlink_stmt_vdef (stmt);
      release_defs (stmt);
    }
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
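/* For instance, when ccp_fold_builtin produces a replacement that is
   not a valid GIMPLE rhs on its own, the expression is gimplified into
   a temporary here, the helper statements are inserted before the
   call, and the call itself becomes LHS = tmp (or a GIMPLE_NOP when
   the result is unused), inheriting the virtual operands of the
   original call.  */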
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee, result;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }
          fcode = DECL_FUNCTION_CODE (callee);

          result = ccp_fold_builtin (stmt);

          if (result)
            gimple_remove_stmt_histograms (cfun, stmt);

          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (i);
                if (result)
                  break;
                gsi_next (&i);
                continue;

              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
                result = optimize_stdarg_builtin (stmt);
                if (result)
                  break;
                /* FALLTHRU */

              default:
                gsi_next (&i);
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          old_stmt = stmt;
          if (!update_call_from_tree (&i, result))
            {
              gimplify_and_update_call_from_tree (&i, result);
              todoflags |= TODO_update_address_taken;
            }

          stmt = gsi_stmt (i);
          update_stmt (stmt);

          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin, there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",                                /* name */
  NULL,                                 /* gate */
  execute_fold_all_builtins,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa                   /* todo_flags_finish */
 }
};