/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED	->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.


   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

	# a_5 = VDEF <a_4>
	a.a = 2;

	# VUSE <a_5>
	x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

	# A_5 = PHI (A_4, A_2);

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
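
/* For illustration (a hypothetical input fragment, not taken from the
   sources above), given GIMPLE such as

	x_1 = 4;
	y_2 = x_1 + 6;
	if (y_2 > 100) ... else ...

   visiting the first assignment sets x_1 to CONSTANT 4; propagating
   that into the second assignment folds it to CONSTANT 10; the
   predicate then folds to false, so only the else edge is marked
   executable, and PHI nodes reachable only through the then edge may
   ignore the corresponding arguments.  */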
#include "coretypes.h"
#include "basic-block.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}
/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

static tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  STRIP_USELESS_TYPE_CONVERSION (val);
	  if (is_gimple_min_invariant (val))
	    return val;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && !DECL_EXTERNAL (sym)
	  && targetm.binds_local_p (sym)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
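
/* For illustration, in a hypothetical translation unit containing

	static const int limit = 42;

   get_symbol_constant_value returns the INTEGER_CST 42 for LIMIT,
   whereas a 'const' declared 'extern' (and therefore possibly defined
   elsewhere) yields NULL_TREE.  */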
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	val.lattice_val = UNDEFINED;
      else
	val.lattice_val = VARYING;
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
    }

  return val;
}
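
/* For illustration: the default definition of an uninitialized local
   (say i_1 for 'int i;') starts out UNDEFINED, the default definition
   of a PARM_DECL starts out VARYING, and a name such as x_3 defined by
   'x_3 = i_1 + 1' starts out UNDEFINED until its defining statement is
   simulated.  Names copied directly from a read-only symbol with a
   known initializer start out CONSTANT.  */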
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

	x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL_TREE;
    }
}
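
/* For illustration: with -ffast-math (so HONOR_SIGNED_ZEROS and
   HONOR_NANS are false for the mode), a lattice value of CONSTANT -0.0
   is canonicalized to CONSTANT 0.0, and a lattice value that is a NaN
   constant (e.g. from folding 0.0 / 0.0) is turned into UNDEFINED so
   that later folds which assume "no NaNs" cannot contradict it.  */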
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}
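
/* For illustration: the only legal transitions for a given SSA name are
   monotonic, e.g.

	UNDEFINED -> CONSTANT 4 -> VARYING

   Moving a name from CONSTANT 4 to CONSTANT 5, or back down the
   lattice, would trip the assertion above; reaffirming the same
   CONSTANT 4 is a non-transition and makes set_lattice_value return
   false.  */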
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
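
/* For illustration: for a statement z_4 = x_1 + y_2 where x_1 is
   CONSTANT 3 and y_2 is CONSTANT 5, likely_value returns CONSTANT and
   evaluate_stmt will attempt to fold it.  If both operands are
   UNDEFINED the result is UNDEFINED; if only one of them is UNDEFINED
   the result falls back to VARYING, except for the few codes such as
   POINTER_PLUS_EXPR handled in the switch above.  */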
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
619 /* Initialize local data structures for CCP. */
622 ccp_initialize (void)
626 const_val
= XCNEWVEC (prop_value_t
, num_ssa_names
);
628 /* Initialize simulation flags for PHI nodes and statements. */
631 gimple_stmt_iterator i
;
633 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
635 gimple stmt
= gsi_stmt (i
);
636 bool is_varying
= surely_varying_stmt_p (stmt
);
643 /* If the statement will not produce a constant, mark
644 all its outputs VARYING. */
645 FOR_EACH_SSA_TREE_OPERAND (def
, stmt
, iter
, SSA_OP_ALL_DEFS
)
646 set_value_varying (def
);
648 prop_set_simulate_again (stmt
, !is_varying
);
652 /* Now process PHI nodes. We never clear the simulate_again flag on
653 phi nodes, since we do not know which edges are executable yet,
654 except for phi nodes for virtual operands when we do not do store ccp. */
657 gimple_stmt_iterator i
;
659 for (i
= gsi_start_phis (bb
); !gsi_end_p (i
); gsi_next (&i
))
661 gimple phi
= gsi_stmt (i
);
663 if (!is_gimple_reg (gimple_phi_result (phi
)))
664 prop_set_simulate_again (phi
, false);
666 prop_set_simulate_again (phi
, true);
671 /* Debug count support. Reset the values of ssa names
672 VARYING when the total number ssa names analyzed is
673 beyond the debug count specified. */
679 for (i
= 0; i
< num_ssa_names
; i
++)
683 const_val
[i
].lattice_val
= VARYING
;
684 const_val
[i
].value
= NULL_TREE
;
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;

  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      val1->value = val1->value;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}
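
/* For illustration, a few applications of the meet operator:

	CONSTANT 4  M  UNDEFINED   = CONSTANT 4
	CONSTANT 4  M  CONSTANT 4  = CONSTANT 4
	CONSTANT 4  M  CONSTANT 7  = VARYING
	CONSTANT 4  M  VARYING     = VARYING

   which is what makes the optimistic treatment of UNDEFINED PHI
   arguments described at the top of the file work.  */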
761 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
762 lattice values to determine PHI_NODE's lattice value. The value of a
763 PHI node is determined calling ccp_lattice_meet with all the arguments
764 of the PHI node that are incoming via executable edges. */
766 static enum ssa_prop_result
767 ccp_visit_phi_node (gimple phi
)
770 prop_value_t
*old_val
, new_val
;
772 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
774 fprintf (dump_file
, "\nVisiting PHI node: ");
775 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
778 old_val
= get_value (gimple_phi_result (phi
));
779 switch (old_val
->lattice_val
)
782 return SSA_PROP_VARYING
;
789 new_val
.lattice_val
= UNDEFINED
;
790 new_val
.value
= NULL_TREE
;
797 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
799 /* Compute the meet operator over all the PHI arguments flowing
800 through executable edges. */
801 edge e
= gimple_phi_arg_edge (phi
, i
);
803 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
806 "\n Argument #%d (%d -> %d %sexecutable)\n",
807 i
, e
->src
->index
, e
->dest
->index
,
808 (e
->flags
& EDGE_EXECUTABLE
) ? "" : "not ");
811 /* If the incoming edge is executable, Compute the meet operator for
812 the existing value of the PHI node and the current PHI argument. */
813 if (e
->flags
& EDGE_EXECUTABLE
)
815 tree arg
= gimple_phi_arg (phi
, i
)->def
;
816 prop_value_t arg_val
;
818 if (is_gimple_min_invariant (arg
))
820 arg_val
.lattice_val
= CONSTANT
;
824 arg_val
= *(get_value (arg
));
826 ccp_lattice_meet (&new_val
, &arg_val
);
828 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
830 fprintf (dump_file
, "\t");
831 print_generic_expr (dump_file
, arg
, dump_flags
);
832 dump_lattice_value (dump_file
, "\tValue: ", arg_val
);
833 fprintf (dump_file
, "\n");
836 if (new_val
.lattice_val
== VARYING
)
841 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
843 dump_lattice_value (dump_file
, "\n PHI node value: ", new_val
);
844 fprintf (dump_file
, "\n\n");
847 /* Make the transition to the new value. */
848 if (set_lattice_value (gimple_phi_result (phi
), new_val
))
850 if (new_val
.lattice_val
== VARYING
)
851 return SSA_PROP_VARYING
;
853 return SSA_PROP_INTERESTING
;
856 return SSA_PROP_NOT_INTERESTING
;
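
/* For illustration: when visiting

	a_11 = PHI <a_9(3), a_10(4)>

   with the edge from block 3 marked not executable and a_10 known to
   be CONSTANT 100, the meet over the executable arguments yields
   CONSTANT 100 and the result is reported as SSA_PROP_INTERESTING; had
   the two incoming constants differed on executable edges, the PHI
   result would have gone to VARYING instead.  */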
859 /* Return true if we may propagate the address expression ADDR into the
860 dereference DEREF and cancel them. */
863 may_propagate_address_into_dereference (tree addr
, tree deref
)
865 gcc_assert (INDIRECT_REF_P (deref
)
866 && TREE_CODE (addr
) == ADDR_EXPR
);
868 /* Don't propagate if ADDR's operand has incomplete type. */
869 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr
, 0))))
872 /* If the address is invariant then we do not need to preserve restrict
873 qualifications. But we do need to preserve volatile qualifiers until
874 we can annotate the folded dereference itself properly. */
875 if (is_gimple_min_invariant (addr
)
876 && (!TREE_THIS_VOLATILE (deref
)
877 || TYPE_VOLATILE (TREE_TYPE (addr
))))
878 return useless_type_conversion_p (TREE_TYPE (deref
),
879 TREE_TYPE (TREE_OPERAND (addr
, 0)));
881 /* Else both the address substitution and the folding must result in
882 a valid useless type conversion sequence. */
883 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref
, 0)),
885 && useless_type_conversion_p (TREE_TYPE (deref
),
886 TREE_TYPE (TREE_OPERAND (addr
, 0))));
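
/* For illustration: given p_1 = &x and a later load tmp_2 = *p_1,
   may_propagate_address_into_dereference tells us whether replacing
   *p_1 by x directly is type-safe; when it is, ccp_fold below can turn
   the load into a plain use of x (and, if x is a read-only symbol with
   a known initializer, into a constant).  */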
889 /* CCP specific front-end to the non-destructive constant folding
892 Attempt to simplify the RHS of STMT knowing that one or more
893 operands are constants.
895 If simplification is possible, return the simplified RHS,
896 otherwise return the original RHS or NULL_TREE. */
899 ccp_fold (gimple stmt
)
901 switch (gimple_code (stmt
))
905 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
907 switch (get_gimple_rhs_class (subcode
))
909 case GIMPLE_SINGLE_RHS
:
911 tree rhs
= gimple_assign_rhs1 (stmt
);
912 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
914 if (TREE_CODE (rhs
) == SSA_NAME
)
916 /* If the RHS is an SSA_NAME, return its known constant value,
918 return get_value (rhs
)->value
;
920 /* Handle propagating invariant addresses into address operations.
921 The folding we do here matches that in tree-ssa-forwprop.c. */
922 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
925 base
= &TREE_OPERAND (rhs
, 0);
926 while (handled_component_p (*base
))
927 base
= &TREE_OPERAND (*base
, 0);
928 if (TREE_CODE (*base
) == INDIRECT_REF
929 && TREE_CODE (TREE_OPERAND (*base
, 0)) == SSA_NAME
)
931 prop_value_t
*val
= get_value (TREE_OPERAND (*base
, 0));
932 if (val
->lattice_val
== CONSTANT
933 && TREE_CODE (val
->value
) == ADDR_EXPR
934 && may_propagate_address_into_dereference
937 /* We need to return a new tree, not modify the IL
938 or share parts of it. So play some tricks to
939 avoid manually building it. */
940 tree ret
, save
= *base
;
941 *base
= TREE_OPERAND (val
->value
, 0);
942 ret
= unshare_expr (rhs
);
943 recompute_tree_invariant_for_addr_expr (ret
);
950 if (kind
== tcc_reference
)
952 if (TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
953 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
955 prop_value_t
*val
= get_value (TREE_OPERAND (rhs
, 0));
956 if (val
->lattice_val
== CONSTANT
)
957 return fold_unary (VIEW_CONVERT_EXPR
,
958 TREE_TYPE (rhs
), val
->value
);
960 else if (TREE_CODE (rhs
) == INDIRECT_REF
961 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
963 prop_value_t
*val
= get_value (TREE_OPERAND (rhs
, 0));
964 if (val
->lattice_val
== CONSTANT
965 && TREE_CODE (val
->value
) == ADDR_EXPR
966 && useless_type_conversion_p (TREE_TYPE (rhs
),
967 TREE_TYPE (TREE_TYPE (val
->value
))))
968 rhs
= TREE_OPERAND (val
->value
, 0);
970 return fold_const_aggregate_ref (rhs
);
972 else if (kind
== tcc_declaration
)
973 return get_symbol_constant_value (rhs
);
977 case GIMPLE_UNARY_RHS
:
979 /* Handle unary operators that can appear in GIMPLE form.
980 Note that we know the single operand must be a constant,
981 so this should almost always return a simplified RHS. */
982 tree lhs
= gimple_assign_lhs (stmt
);
983 tree op0
= gimple_assign_rhs1 (stmt
);
985 /* Simplify the operand down to a constant. */
986 if (TREE_CODE (op0
) == SSA_NAME
)
988 prop_value_t
*val
= get_value (op0
);
989 if (val
->lattice_val
== CONSTANT
)
990 op0
= get_value (op0
)->value
;
993 /* Conversions are useless for CCP purposes if they are
994 value-preserving. Thus the restrictions that
995 useless_type_conversion_p places for pointer type conversions
996 do not apply here. Substitution later will only substitute to
998 if (CONVERT_EXPR_CODE_P (subcode
)
999 && POINTER_TYPE_P (TREE_TYPE (lhs
))
1000 && POINTER_TYPE_P (TREE_TYPE (op0
))
1001 /* Do not allow differences in volatile qualification
1002 as this might get us confused as to whether a
1003 propagation destination statement is volatile
1004 or not. See PR36988. */
1005 && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs
)))
1006 == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0
)))))
1009 /* Still try to generate a constant of correct type. */
1010 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
1012 && ((tem
= maybe_fold_offset_to_address
1013 (op0
, integer_zero_node
, TREE_TYPE (lhs
)))
1019 return fold_unary_ignore_overflow (subcode
,
1020 gimple_expr_type (stmt
), op0
);
1023 case GIMPLE_BINARY_RHS
:
1025 /* Handle binary operators that can appear in GIMPLE form. */
1026 tree op0
= gimple_assign_rhs1 (stmt
);
1027 tree op1
= gimple_assign_rhs2 (stmt
);
1029 /* Simplify the operands down to constants when appropriate. */
1030 if (TREE_CODE (op0
) == SSA_NAME
)
1032 prop_value_t
*val
= get_value (op0
);
1033 if (val
->lattice_val
== CONSTANT
)
1037 if (TREE_CODE (op1
) == SSA_NAME
)
1039 prop_value_t
*val
= get_value (op1
);
1040 if (val
->lattice_val
== CONSTANT
)
1044 /* Fold &foo + CST into an invariant reference if possible. */
1045 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
1046 && TREE_CODE (op0
) == ADDR_EXPR
1047 && TREE_CODE (op1
) == INTEGER_CST
)
1049 tree lhs
= gimple_assign_lhs (stmt
);
1050 tree tem
= maybe_fold_offset_to_address (op0
, op1
,
1052 if (tem
!= NULL_TREE
)
1056 return fold_binary (subcode
, gimple_expr_type (stmt
), op0
, op1
);
1067 tree fn
= gimple_call_fn (stmt
);
1070 if (TREE_CODE (fn
) == SSA_NAME
)
1072 val
= get_value (fn
);
1073 if (val
->lattice_val
== CONSTANT
)
1076 if (TREE_CODE (fn
) == ADDR_EXPR
1077 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
1078 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0)))
1080 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
1083 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1085 args
[i
] = gimple_call_arg (stmt
, i
);
1086 if (TREE_CODE (args
[i
]) == SSA_NAME
)
1088 val
= get_value (args
[i
]);
1089 if (val
->lattice_val
== CONSTANT
)
1090 args
[i
] = val
->value
;
1093 call
= build_call_array (gimple_call_return_type (stmt
),
1094 fn
, gimple_call_num_args (stmt
), args
);
1095 retval
= fold_call_expr (call
, false);
1097 /* fold_call_expr wraps the result inside a NOP_EXPR. */
1098 STRIP_NOPS (retval
);
1106 /* Handle comparison operators that can appear in GIMPLE form. */
1107 tree op0
= gimple_cond_lhs (stmt
);
1108 tree op1
= gimple_cond_rhs (stmt
);
1109 enum tree_code code
= gimple_cond_code (stmt
);
1111 /* Simplify the operands down to constants when appropriate. */
1112 if (TREE_CODE (op0
) == SSA_NAME
)
1114 prop_value_t
*val
= get_value (op0
);
1115 if (val
->lattice_val
== CONSTANT
)
1119 if (TREE_CODE (op1
) == SSA_NAME
)
1121 prop_value_t
*val
= get_value (op1
);
1122 if (val
->lattice_val
== CONSTANT
)
1126 return fold_binary (code
, boolean_type_node
, op0
, op1
);
1131 tree rhs
= gimple_switch_index (stmt
);
1133 if (TREE_CODE (rhs
) == SSA_NAME
)
1135 /* If the RHS is an SSA_NAME, return its known constant value,
1137 return get_value (rhs
)->value
;
1149 /* Return the tree representing the element referenced by T if T is an
1150 ARRAY_REF or COMPONENT_REF into constant aggregates. Return
1151 NULL_TREE otherwise. */
1154 fold_const_aggregate_ref (tree t
)
1156 prop_value_t
*value
;
1157 tree base
, ctor
, idx
, field
;
1158 unsigned HOST_WIDE_INT cnt
;
1161 if (TREE_CODE_CLASS (TREE_CODE (t
)) == tcc_declaration
)
1162 return get_symbol_constant_value (t
);
1164 switch (TREE_CODE (t
))
1167 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1168 DECL_INITIAL. If BASE is a nested reference into another
1169 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1170 the inner reference. */
1171 base
= TREE_OPERAND (t
, 0);
1172 switch (TREE_CODE (base
))
1175 if (!TREE_READONLY (base
)
1176 || TREE_CODE (TREE_TYPE (base
)) != ARRAY_TYPE
1177 || !targetm
.binds_local_p (base
))
1180 ctor
= DECL_INITIAL (base
);
1185 ctor
= fold_const_aggregate_ref (base
);
1197 if (ctor
== NULL_TREE
1198 || (TREE_CODE (ctor
) != CONSTRUCTOR
1199 && TREE_CODE (ctor
) != STRING_CST
)
1200 || !TREE_STATIC (ctor
))
1203 /* Get the index. If we have an SSA_NAME, try to resolve it
1204 with the current lattice value for the SSA_NAME. */
1205 idx
= TREE_OPERAND (t
, 1);
1206 switch (TREE_CODE (idx
))
1209 if ((value
= get_value (idx
))
1210 && value
->lattice_val
== CONSTANT
1211 && TREE_CODE (value
->value
) == INTEGER_CST
)
1224 /* Fold read from constant string. */
1225 if (TREE_CODE (ctor
) == STRING_CST
)
1227 if ((TYPE_MODE (TREE_TYPE (t
))
1228 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
1229 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
1231 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
)))) == 1
1232 && compare_tree_int (idx
, TREE_STRING_LENGTH (ctor
)) < 0)
1233 return build_int_cst_type (TREE_TYPE (t
),
1234 (TREE_STRING_POINTER (ctor
)
1235 [TREE_INT_CST_LOW (idx
)]));
1239 /* Whoo-hoo! I'll fold ya baby. Yeah! */
1240 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
1241 if (tree_int_cst_equal (cfield
, idx
))
1243 STRIP_USELESS_TYPE_CONVERSION (cval
);
1249 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1250 DECL_INITIAL. If BASE is a nested reference into another
1251 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1252 the inner reference. */
1253 base
= TREE_OPERAND (t
, 0);
1254 switch (TREE_CODE (base
))
1257 if (!TREE_READONLY (base
)
1258 || TREE_CODE (TREE_TYPE (base
)) != RECORD_TYPE
1259 || !targetm
.binds_local_p (base
))
1262 ctor
= DECL_INITIAL (base
);
1267 ctor
= fold_const_aggregate_ref (base
);
1274 if (ctor
== NULL_TREE
1275 || TREE_CODE (ctor
) != CONSTRUCTOR
1276 || !TREE_STATIC (ctor
))
1279 field
= TREE_OPERAND (t
, 1);
1281 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
1283 /* FIXME: Handle bit-fields. */
1284 && ! DECL_BIT_FIELD (cfield
))
1286 STRIP_USELESS_TYPE_CONVERSION (cval
);
1294 tree c
= fold_const_aggregate_ref (TREE_OPERAND (t
, 0));
1295 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
1296 return fold_build1 (TREE_CODE (t
), TREE_TYPE (t
), c
);
1302 tree base
= TREE_OPERAND (t
, 0);
1303 if (TREE_CODE (base
) == SSA_NAME
1304 && (value
= get_value (base
))
1305 && value
->lattice_val
== CONSTANT
1306 && TREE_CODE (value
->value
) == ADDR_EXPR
)
1307 return fold_const_aggregate_ref (TREE_OPERAND (value
->value
, 0));
1318 /* Evaluate statement STMT.
1319 Valid only for assignments, calls, conditionals, and switches. */
1322 evaluate_stmt (gimple stmt
)
1325 tree simplified
= NULL_TREE
;
1326 ccp_lattice_t likelyvalue
= likely_value (stmt
);
1329 fold_defer_overflow_warnings ();
1331 /* If the statement is likely to have a CONSTANT result, then try
1332 to fold the statement to determine the constant value. */
1333 /* FIXME. This is the only place that we call ccp_fold.
1334 Since likely_value never returns CONSTANT for calls, we will
1335 not attempt to fold them, including builtins that may profit. */
1336 if (likelyvalue
== CONSTANT
)
1337 simplified
= ccp_fold (stmt
);
1338 /* If the statement is likely to have a VARYING result, then do not
1339 bother folding the statement. */
1340 else if (likelyvalue
== VARYING
)
1342 enum gimple_code code
= gimple_code (stmt
);
1343 if (code
== GIMPLE_ASSIGN
)
1345 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
1347 /* Other cases cannot satisfy is_gimple_min_invariant
1349 if (get_gimple_rhs_class (subcode
) == GIMPLE_SINGLE_RHS
)
1350 simplified
= gimple_assign_rhs1 (stmt
);
1352 else if (code
== GIMPLE_SWITCH
)
1353 simplified
= gimple_switch_index (stmt
);
1355 /* These cannot satisfy is_gimple_min_invariant without folding. */
1356 gcc_assert (code
== GIMPLE_CALL
|| code
== GIMPLE_COND
);
1359 is_constant
= simplified
&& is_gimple_min_invariant (simplified
);
1361 fold_undefer_overflow_warnings (is_constant
, stmt
, 0);
1363 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1365 fprintf (dump_file
, "which is likely ");
1366 switch (likelyvalue
)
1369 fprintf (dump_file
, "CONSTANT");
1372 fprintf (dump_file
, "UNDEFINED");
1375 fprintf (dump_file
, "VARYING");
1379 fprintf (dump_file
, "\n");
1384 /* The statement produced a constant value. */
1385 val
.lattice_val
= CONSTANT
;
1386 val
.value
= simplified
;
1390 /* The statement produced a nonconstant value. If the statement
1391 had UNDEFINED operands, then the result of the statement
1392 should be UNDEFINED. Otherwise, the statement is VARYING. */
1393 if (likelyvalue
== UNDEFINED
)
1394 val
.lattice_val
= likelyvalue
;
1396 val
.lattice_val
= VARYING
;
1398 val
.value
= NULL_TREE
;
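
/* For illustration: evaluating x_2 = y_1 * 2 with y_1 known to be
   CONSTANT 5 produces a CONSTANT lattice value holding 10; the same
   statement with y_1 UNDEFINED produces UNDEFINED, and with y_1
   VARYING the result is VARYING without any folding attempt.  */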
1404 /* Visit the assignment statement STMT. Set the value of its LHS to the
1405 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1406 creates virtual definitions, set the value of each new name to that
1407 of the RHS (if we can derive a constant out of the RHS).
1408 Value-returning call statements also perform an assignment, and
1409 are handled here. */
1411 static enum ssa_prop_result
1412 visit_assignment (gimple stmt
, tree
*output_p
)
1415 enum ssa_prop_result retval
;
1417 tree lhs
= gimple_get_lhs (stmt
);
1419 gcc_assert (gimple_code (stmt
) != GIMPLE_CALL
1420 || gimple_call_lhs (stmt
) != NULL_TREE
);
1422 if (gimple_assign_copy_p (stmt
))
1424 tree rhs
= gimple_assign_rhs1 (stmt
);
1426 if (TREE_CODE (rhs
) == SSA_NAME
)
1428 /* For a simple copy operation, we copy the lattice values. */
1429 prop_value_t
*nval
= get_value (rhs
);
1433 val
= evaluate_stmt (stmt
);
1436 /* Evaluate the statement, which could be
1437 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1438 val
= evaluate_stmt (stmt
);
1440 retval
= SSA_PROP_NOT_INTERESTING
;
1442 /* Set the lattice value of the statement's output. */
1443 if (TREE_CODE (lhs
) == SSA_NAME
)
1445 /* If STMT is an assignment to an SSA_NAME, we only have one
1447 if (set_lattice_value (lhs
, val
))
1450 if (val
.lattice_val
== VARYING
)
1451 retval
= SSA_PROP_VARYING
;
1453 retval
= SSA_PROP_INTERESTING
;
1461 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1462 if it can determine which edge will be taken. Otherwise, return
1463 SSA_PROP_VARYING. */
1465 static enum ssa_prop_result
1466 visit_cond_stmt (gimple stmt
, edge
*taken_edge_p
)
1471 block
= gimple_bb (stmt
);
1472 val
= evaluate_stmt (stmt
);
1474 /* Find which edge out of the conditional block will be taken and add it
1475 to the worklist. If no single edge can be determined statically,
1476 return SSA_PROP_VARYING to feed all the outgoing edges to the
1477 propagation engine. */
1478 *taken_edge_p
= val
.value
? find_taken_edge (block
, val
.value
) : 0;
1480 return SSA_PROP_INTERESTING
;
1482 return SSA_PROP_VARYING
;
1486 /* Evaluate statement STMT. If the statement produces an output value and
1487 its evaluation changes the lattice value of its output, return
1488 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1491 If STMT is a conditional branch and we can determine its truth
1492 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1493 value, return SSA_PROP_VARYING. */
1495 static enum ssa_prop_result
1496 ccp_visit_stmt (gimple stmt
, edge
*taken_edge_p
, tree
*output_p
)
1501 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1503 fprintf (dump_file
, "\nVisiting statement:\n");
1504 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
1507 switch (gimple_code (stmt
))
1510 /* If the statement is an assignment that produces a single
1511 output value, evaluate its RHS to see if the lattice value of
1512 its output has changed. */
1513 return visit_assignment (stmt
, output_p
);
1516 /* A value-returning call also performs an assignment. */
1517 if (gimple_call_lhs (stmt
) != NULL_TREE
)
1518 return visit_assignment (stmt
, output_p
);
1523 /* If STMT is a conditional branch, see if we can determine
1524 which branch will be taken. */
1525 /* FIXME. It appears that we should be able to optimize
1526 computed GOTOs here as well. */
1527 return visit_cond_stmt (stmt
, taken_edge_p
);
1533 /* Any other kind of statement is not interesting for constant
1534 propagation and, therefore, not worth simulating. */
1535 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1536 fprintf (dump_file
, "No interesting values produced. Marked VARYING.\n");
1538 /* Definitions made by statements other than assignments to
1539 SSA_NAMEs represent unknown modifications to their outputs.
1540 Mark them VARYING. */
1541 FOR_EACH_SSA_TREE_OPERAND (def
, stmt
, iter
, SSA_OP_ALL_DEFS
)
1543 prop_value_t v
= { VARYING
, NULL_TREE
};
1544 set_lattice_value (def
, v
);
1547 return SSA_PROP_VARYING
;
1551 /* Main entry point for SSA Conditional Constant Propagation. */
1557 ssa_propagate (ccp_visit_stmt
, ccp_visit_phi_node
);
1558 if (ccp_finalize ())
1559 return (TODO_cleanup_cfg
| TODO_update_ssa
| TODO_remove_unused_locals
);
1568 return flag_tree_ccp
!= 0;
1572 struct gimple_opt_pass pass_ccp
=
1577 gate_ccp
, /* gate */
1578 do_ssa_ccp
, /* execute */
1581 0, /* static_pass_number */
1582 TV_TREE_CCP
, /* tv_id */
1583 PROP_cfg
| PROP_ssa
, /* properties_required */
1584 0, /* properties_provided */
1585 0, /* properties_destroyed */
1586 0, /* todo_flags_start */
1587 TODO_dump_func
| TODO_verify_ssa
1588 | TODO_verify_stmts
| TODO_ggc_collect
/* todo_flags_finish */
1593 /* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
1594 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
1595 is the desired result type. */
1598 maybe_fold_offset_to_array_ref (tree base
, tree offset
, tree orig_type
,
1599 bool allow_negative_idx
)
1601 tree min_idx
, idx
, idx_type
, elt_offset
= integer_zero_node
;
1602 tree array_type
, elt_type
, elt_size
;
1605 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
1606 measured in units of the size of elements type) from that ARRAY_REF).
1607 We can't do anything if either is variable.
1609 The case we handle here is *(&A[N]+O). */
1610 if (TREE_CODE (base
) == ARRAY_REF
)
1612 tree low_bound
= array_ref_low_bound (base
);
1614 elt_offset
= TREE_OPERAND (base
, 1);
1615 if (TREE_CODE (low_bound
) != INTEGER_CST
1616 || TREE_CODE (elt_offset
) != INTEGER_CST
)
1619 elt_offset
= int_const_binop (MINUS_EXPR
, elt_offset
, low_bound
, 0);
1620 base
= TREE_OPERAND (base
, 0);
1623 /* Ignore stupid user tricks of indexing non-array variables. */
1624 array_type
= TREE_TYPE (base
);
1625 if (TREE_CODE (array_type
) != ARRAY_TYPE
)
1627 elt_type
= TREE_TYPE (array_type
);
1628 if (!useless_type_conversion_p (orig_type
, elt_type
))
1631 /* Use signed size type for intermediate computation on the index. */
1632 idx_type
= signed_type_for (size_type_node
);
1634 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1635 element type (so we can use the alignment if it's not constant).
1636 Otherwise, compute the offset as an index by using a division. If the
1637 division isn't exact, then don't do anything. */
1638 elt_size
= TYPE_SIZE_UNIT (elt_type
);
1641 if (integer_zerop (offset
))
1643 if (TREE_CODE (elt_size
) != INTEGER_CST
)
1644 elt_size
= size_int (TYPE_ALIGN (elt_type
));
1646 idx
= build_int_cst (idx_type
, 0);
1650 unsigned HOST_WIDE_INT lquo
, lrem
;
1651 HOST_WIDE_INT hquo
, hrem
;
1654 /* The final array offset should be signed, so we need
1655 to sign-extend the (possibly pointer) offset here
1656 and use signed division. */
1657 soffset
= double_int_sext (tree_to_double_int (offset
),
1658 TYPE_PRECISION (TREE_TYPE (offset
)));
1659 if (TREE_CODE (elt_size
) != INTEGER_CST
1660 || div_and_round_double (TRUNC_DIV_EXPR
, 0,
1661 soffset
.low
, soffset
.high
,
1662 TREE_INT_CST_LOW (elt_size
),
1663 TREE_INT_CST_HIGH (elt_size
),
1664 &lquo
, &hquo
, &lrem
, &hrem
)
1668 idx
= build_int_cst_wide (idx_type
, lquo
, hquo
);
1671 /* Assume the low bound is zero. If there is a domain type, get the
1672 low bound, if any, convert the index into that type, and add the
1674 min_idx
= build_int_cst (idx_type
, 0);
1675 domain_type
= TYPE_DOMAIN (array_type
);
1678 idx_type
= domain_type
;
1679 if (TYPE_MIN_VALUE (idx_type
))
1680 min_idx
= TYPE_MIN_VALUE (idx_type
);
1682 min_idx
= fold_convert (idx_type
, min_idx
);
1684 if (TREE_CODE (min_idx
) != INTEGER_CST
)
1687 elt_offset
= fold_convert (idx_type
, elt_offset
);
1690 if (!integer_zerop (min_idx
))
1691 idx
= int_const_binop (PLUS_EXPR
, idx
, min_idx
, 0);
1692 if (!integer_zerop (elt_offset
))
1693 idx
= int_const_binop (PLUS_EXPR
, idx
, elt_offset
, 0);
1695 /* Make sure to possibly truncate late after offsetting. */
1696 idx
= fold_convert (idx_type
, idx
);
1698 /* We don't want to construct access past array bounds. For example
1701 should not be simplified into (*c)[14] or tree-vrp will
1702 give false warnings. The same is true for
1703 struct A { long x; char d[0]; } *a;
1705 which should be not folded to &a->d[-8]. */
1707 && TYPE_MAX_VALUE (domain_type
)
1708 && TREE_CODE (TYPE_MAX_VALUE (domain_type
)) == INTEGER_CST
)
1710 tree up_bound
= TYPE_MAX_VALUE (domain_type
);
1712 if (tree_int_cst_lt (up_bound
, idx
)
1713 /* Accesses after the end of arrays of size 0 (gcc
1714 extension) and 1 are likely intentional ("struct
1716 && compare_tree_int (up_bound
, 1) > 0)
1720 && TYPE_MIN_VALUE (domain_type
))
1722 if (!allow_negative_idx
1723 && TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
1724 && tree_int_cst_lt (idx
, TYPE_MIN_VALUE (domain_type
)))
1727 else if (!allow_negative_idx
1728 && compare_tree_int (idx
, 0) < 0)
1731 return build4 (ARRAY_REF
, elt_type
, base
, idx
, NULL_TREE
, NULL_TREE
);
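
/* For illustration: with 'int a[10];' (assuming 4-byte int), folding
   the access *(&a[2] + 8) reaches this routine with BASE = a[2] and
   OFFSET = 8; the offset divides evenly by the element size, giving an
   extra index of 2, so the access becomes a[4].  An offset of, say, 6
   would not divide evenly and the routine would give up.  */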
1735 /* Attempt to fold *(S+O) to S.X.
1736 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
1737 is the desired result type. */
1740 maybe_fold_offset_to_component_ref (tree record_type
, tree base
, tree offset
,
1741 tree orig_type
, bool base_is_ptr
)
1743 tree f
, t
, field_type
, tail_array_field
, field_offset
;
1747 if (TREE_CODE (record_type
) != RECORD_TYPE
1748 && TREE_CODE (record_type
) != UNION_TYPE
1749 && TREE_CODE (record_type
) != QUAL_UNION_TYPE
)
1752 /* Short-circuit silly cases. */
1753 if (useless_type_conversion_p (record_type
, orig_type
))
1756 tail_array_field
= NULL_TREE
;
1757 for (f
= TYPE_FIELDS (record_type
); f
; f
= TREE_CHAIN (f
))
1761 if (TREE_CODE (f
) != FIELD_DECL
)
1763 if (DECL_BIT_FIELD (f
))
1766 if (!DECL_FIELD_OFFSET (f
))
1768 field_offset
= byte_position (f
);
1769 if (TREE_CODE (field_offset
) != INTEGER_CST
)
1772 /* ??? Java creates "interesting" fields for representing base classes.
1773 They have no name, and have no context. With no context, we get into
1774 trouble with nonoverlapping_component_refs_p. Skip them. */
1775 if (!DECL_FIELD_CONTEXT (f
))
1778 /* The previous array field isn't at the end. */
1779 tail_array_field
= NULL_TREE
;
1781 /* Check to see if this offset overlaps with the field. */
1782 cmp
= tree_int_cst_compare (field_offset
, offset
);
1786 field_type
= TREE_TYPE (f
);
1788 /* Here we exactly match the offset being checked. If the types match,
1789 then we can return that field. */
1791 && useless_type_conversion_p (orig_type
, field_type
))
1794 base
= build1 (INDIRECT_REF
, record_type
, base
);
1795 t
= build3 (COMPONENT_REF
, field_type
, base
, f
, NULL_TREE
);
1799 /* Don't care about offsets into the middle of scalars. */
1800 if (!AGGREGATE_TYPE_P (field_type
))
1803 /* Check for array at the end of the struct. This is often
1804 used as for flexible array members. We should be able to
1805 turn this into an array access anyway. */
1806 if (TREE_CODE (field_type
) == ARRAY_TYPE
)
1807 tail_array_field
= f
;
1809 /* Check the end of the field against the offset. */
1810 if (!DECL_SIZE_UNIT (f
)
1811 || TREE_CODE (DECL_SIZE_UNIT (f
)) != INTEGER_CST
)
1813 t
= int_const_binop (MINUS_EXPR
, offset
, field_offset
, 1);
1814 if (!tree_int_cst_lt (t
, DECL_SIZE_UNIT (f
)))
1817 /* If we matched, then set offset to the displacement into
1820 new_base
= build1 (INDIRECT_REF
, record_type
, base
);
1823 new_base
= build3 (COMPONENT_REF
, field_type
, new_base
, f
, NULL_TREE
);
1825 /* Recurse to possibly find the match. */
1826 ret
= maybe_fold_offset_to_array_ref (new_base
, t
, orig_type
,
1827 f
== TYPE_FIELDS (record_type
));
1830 ret
= maybe_fold_offset_to_component_ref (field_type
, new_base
, t
,
1836 if (!tail_array_field
)
1839 f
= tail_array_field
;
1840 field_type
= TREE_TYPE (f
);
1841 offset
= int_const_binop (MINUS_EXPR
, offset
, byte_position (f
), 1);
1843 /* If we get here, we've got an aggregate field, and a possibly
1844 nonzero offset into them. Recurse and hope for a valid match. */
1846 base
= build1 (INDIRECT_REF
, record_type
, base
);
1847 base
= build3 (COMPONENT_REF
, field_type
, base
, f
, NULL_TREE
);
1849 t
= maybe_fold_offset_to_array_ref (base
, offset
, orig_type
,
1850 f
== TYPE_FIELDS (record_type
));
1853 return maybe_fold_offset_to_component_ref (field_type
, base
, offset
,
1857 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
1858 or BASE[index] or by combination of those.
1860 Before attempting the conversion strip off existing ADDR_EXPRs and
1861 handled component refs. */
1864 maybe_fold_offset_to_reference (tree base
, tree offset
, tree orig_type
)
1868 bool base_is_ptr
= true;
1871 if (TREE_CODE (base
) == ADDR_EXPR
)
1873 base_is_ptr
= false;
1875 base
= TREE_OPERAND (base
, 0);
1877 /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
1878 so it needs to be removed and new COMPONENT_REF constructed.
1879 The wrong COMPONENT_REF are often constructed by folding the
1880 (type *)&object within the expression (type *)&object+offset */
1881 if (handled_component_p (base
))
1883 HOST_WIDE_INT sub_offset
, size
, maxsize
;
1885 newbase
= get_ref_base_and_extent (base
, &sub_offset
,
1887 gcc_assert (newbase
);
1890 && !(sub_offset
& (BITS_PER_UNIT
- 1)))
1894 offset
= int_const_binop (PLUS_EXPR
, offset
,
1895 build_int_cst (TREE_TYPE (offset
),
1896 sub_offset
/ BITS_PER_UNIT
), 1);
1899 if (useless_type_conversion_p (orig_type
, TREE_TYPE (base
))
1900 && integer_zerop (offset
))
1902 type
= TREE_TYPE (base
);
1907 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
1909 type
= TREE_TYPE (TREE_TYPE (base
));
1911 ret
= maybe_fold_offset_to_component_ref (type
, base
, offset
,
1912 orig_type
, base_is_ptr
);
1916 base
= build1 (INDIRECT_REF
, type
, base
);
1917 ret
= maybe_fold_offset_to_array_ref (base
, offset
, orig_type
, true);
1922 /* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
1923 or &BASE[index] or by combination of those.
1925 Before attempting the conversion strip off existing component refs. */
1928 maybe_fold_offset_to_address (tree addr
, tree offset
, tree orig_type
)
1932 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr
))
1933 && POINTER_TYPE_P (orig_type
));
1935 t
= maybe_fold_offset_to_reference (addr
, offset
, TREE_TYPE (orig_type
));
1941 /* For __builtin_object_size to function correctly we need to
1942 make sure not to fold address arithmetic so that we change
1943 reference from one array to another. This would happen for
1946 struct X { char s1[10]; char s2[10] } s;
1947 char *foo (void) { return &s.s2[-4]; }
1949 where we need to avoid generating &s.s1[6]. As the C and
1950 C++ frontends create different initial trees
1951 (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
1952 sophisticated comparisons here. Note that checking for the
1953 condition after the fact is easier than trying to avoid doing
1956 if (TREE_CODE (orig
) == ADDR_EXPR
)
1957 orig
= TREE_OPERAND (orig
, 0);
1958 if ((TREE_CODE (orig
) == ARRAY_REF
1959 || (TREE_CODE (orig
) == COMPONENT_REF
1960 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig
, 1))) == ARRAY_TYPE
))
1961 && (TREE_CODE (t
) == ARRAY_REF
1962 || TREE_CODE (t
) == COMPONENT_REF
)
1963 && !operand_equal_p (TREE_CODE (orig
) == ARRAY_REF
1964 ? TREE_OPERAND (orig
, 0) : orig
,
1965 TREE_CODE (t
) == ARRAY_REF
1966 ? TREE_OPERAND (t
, 0) : t
, 0))
1969 ptr_type
= build_pointer_type (TREE_TYPE (t
));
1970 if (!useless_type_conversion_p (orig_type
, ptr_type
))
1972 return build_fold_addr_expr_with_type (t
, ptr_type
);
1978 /* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET).
1979 Return the simplified expression, or NULL if nothing could be done. */
1982 maybe_fold_stmt_indirect (tree expr
, tree base
, tree offset
)
1985 bool volatile_p
= TREE_THIS_VOLATILE (expr
);
1987 /* We may well have constructed a double-nested PLUS_EXPR via multiple
1988 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
1989 are sometimes added. */
1991 STRIP_TYPE_NOPS (base
);
1992 TREE_OPERAND (expr
, 0) = base
;
1994 /* One possibility is that the address reduces to a string constant. */
1995 t
= fold_read_from_constant_string (expr
);
1999 /* Add in any offset from a POINTER_PLUS_EXPR. */
2000 if (TREE_CODE (base
) == POINTER_PLUS_EXPR
)
2004 offset2
= TREE_OPERAND (base
, 1);
2005 if (TREE_CODE (offset2
) != INTEGER_CST
)
2007 base
= TREE_OPERAND (base
, 0);
2009 offset
= fold_convert (sizetype
,
2010 int_const_binop (PLUS_EXPR
, offset
, offset2
, 1));
2013 if (TREE_CODE (base
) == ADDR_EXPR
)
2015 tree base_addr
= base
;
2017 /* Strip the ADDR_EXPR. */
2018 base
= TREE_OPERAND (base
, 0);
2020 /* Fold away CONST_DECL to its value, if the type is scalar. */
2021 if (TREE_CODE (base
) == CONST_DECL
2022 && is_gimple_min_invariant (DECL_INITIAL (base
)))
2023 return DECL_INITIAL (base
);
2025 /* Try folding *(&B+O) to B.X. */
2026 t
= maybe_fold_offset_to_reference (base_addr
, offset
,
2030 /* Preserve volatileness of the original expression.
2031 We can end up with a plain decl here which is shared
2032 and we shouldn't mess with its flags. */
2034 TREE_THIS_VOLATILE (t
) = volatile_p
;
2040 /* We can get here for out-of-range string constant accesses,
2041 such as "_"[3]. Bail out of the entire substitution search
2042 and arrange for the entire statement to be replaced by a
2043 call to __builtin_trap. In all likelihood this will all be
2044 constant-folded away, but in the meantime we can't leave with
2045 something that get_expr_operands can't understand. */
2049 if (TREE_CODE (t
) == ADDR_EXPR
2050 && TREE_CODE (TREE_OPERAND (t
, 0)) == STRING_CST
)
2052 /* FIXME: Except that this causes problems elsewhere with dead
2053 code not being deleted, and we die in the rtl expanders
2054 because we failed to remove some ssa_name. In the meantime,
2055 just return zero. */
2056 /* FIXME2: This condition should be signaled by
2057 fold_read_from_constant_string directly, rather than
2058 re-checking for it here. */
2059 return integer_zero_node
;
2062 /* Try folding *(B+O) to B->X. Still an improvement. */
2063 if (POINTER_TYPE_P (TREE_TYPE (base
)))
2065 t
= maybe_fold_offset_to_reference (base
, offset
,
2072 /* Otherwise we had an offset that we could not simplify. */
2077 /* A quaint feature extant in our address arithmetic is that there
2078 can be hidden type changes here. The type of the result need
2079 not be the same as the type of the input pointer.
2081 What we're after here is an expression of the form
2082 (T *)(&array + const)
2083 where array is OP0, const is OP1, RES_TYPE is T and
2084 the cast doesn't actually exist, but is implicit in the
2085 type of the POINTER_PLUS_EXPR. We'd like to turn this into
2087 which may be able to propagate further. */
2090 maybe_fold_stmt_addition (tree res_type
, tree op0
, tree op1
)
2095 /* It had better be a constant. */
2096 if (TREE_CODE (op1
) != INTEGER_CST
)
2098 /* The first operand should be an ADDR_EXPR. */
2099 if (TREE_CODE (op0
) != ADDR_EXPR
)
2101 op0
= TREE_OPERAND (op0
, 0);
2103 /* If the first operand is an ARRAY_REF, expand it so that we can fold
2104 the offset into it. */
2105 while (TREE_CODE (op0
) == ARRAY_REF
)
2107 tree array_obj
= TREE_OPERAND (op0
, 0);
2108 tree array_idx
= TREE_OPERAND (op0
, 1);
2109 tree elt_type
= TREE_TYPE (op0
);
2110 tree elt_size
= TYPE_SIZE_UNIT (elt_type
);
2113 if (TREE_CODE (array_idx
) != INTEGER_CST
)
2115 if (TREE_CODE (elt_size
) != INTEGER_CST
)
2118 /* Un-bias the index by the min index of the array type. */
2119 min_idx
= TYPE_DOMAIN (TREE_TYPE (array_obj
));
2122 min_idx
= TYPE_MIN_VALUE (min_idx
);
2125 if (TREE_CODE (min_idx
) != INTEGER_CST
)
2128 array_idx
= fold_convert (TREE_TYPE (min_idx
), array_idx
);
2129 if (!integer_zerop (min_idx
))
2130 array_idx
= int_const_binop (MINUS_EXPR
, array_idx
,
2135 /* Convert the index to a byte offset. */
2136 array_idx
= fold_convert (sizetype
, array_idx
);
2137 array_idx
= int_const_binop (MULT_EXPR
, array_idx
, elt_size
, 0);
2139 /* Update the operands for the next round, or for folding. */
2140 op1
= int_const_binop (PLUS_EXPR
,
2145 ptd_type
= TREE_TYPE (res_type
);
2146 /* If we want a pointer to void, reconstruct the reference from the
2147 array element type. A pointer to that can be trivially converted
2148 to void *. This happens as we fold (void *)(ptr p+ off). */
2149 if (VOID_TYPE_P (ptd_type
)
2150 && TREE_CODE (TREE_TYPE (op0
)) == ARRAY_TYPE
)
2151 ptd_type
= TREE_TYPE (TREE_TYPE (op0
));
2153 /* At which point we can try some of the same things as for indirects. */
2154 t
= maybe_fold_offset_to_array_ref (op0
, op1
, ptd_type
, true);
2156 t
= maybe_fold_offset_to_component_ref (TREE_TYPE (op0
), op0
, op1
,
2159 t
= build1 (ADDR_EXPR
, res_type
, t
);
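
/* For illustration: for 'char buf[16];', the POINTER_PLUS_EXPR
   (char *) &buf + 5 matches the pattern described above with
   OP0 = &buf and OP1 = 5, and is rewritten as &buf[5], which later
   passes may be able to propagate further.  */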
2164 /* For passing state through walk_tree into fold_stmt_r and its
2167 struct fold_stmt_r_data
2171 bool *inside_addr_expr_p
;

/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct fold_stmt_r_data *fold_stmt_r_data;
  bool *inside_addr_expr_p;
  bool *changed_p;
  tree expr = *expr_p, t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
  inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
  changed_p = fold_stmt_r_data->changed_p;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
				    integer_zero_node);
      /* Avoid folding *"abc" = 5 into 'a' = 5.  */
      if (wi->is_lhs && t && TREE_CODE (t) == INTEGER_CST)
	t = NULL_TREE;
      if (!t
	  && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	/* If we had a good reason for propagating the address here,
	   make sure we end up with valid gimple.  See PR34989.  */
	t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
      break;

    case NOP_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      if (POINTER_TYPE_P (TREE_TYPE (expr))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
						integer_zero_node,
						TREE_TYPE (TREE_TYPE (expr)))))
	return t;
      break;

      /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
	 We'd only want to bother decomposing an existing ARRAY_REF if
	 the base array is found to have another offset contained within.
	 Otherwise we'd be wasting time.  */
    case ARRAY_REF:
      /* If we are not processing expressions found within an
	 ADDR_EXPR, then we can fold constant array references.
	 Don't fold on LHS either, to avoid folding "abc"[0] = 5
	 to 'a' = 5.  */
      if (!*inside_addr_expr_p && !wi->is_lhs)
	t = fold_read_from_constant_string (expr);
      else
	t = NULL_TREE;
      break;

    case ADDR_EXPR:
      *inside_addr_expr_p = true;
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      *inside_addr_expr_p = false;
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Make sure the value is properly considered constant, and so gets
	 propagated as expected.  */
      if (*changed_p)
	recompute_tree_invariant_for_addr_expr (expr);
      return NULL_TREE;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
	 We've already checked that the records are compatible, so we should
	 come up with a set of compatible fields.  */
      {
	tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
	tree expr_field = TREE_OPERAND (expr, 1);

	if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
	  {
	    expr_field = find_compatible_field (expr_record, expr_field);
	    TREE_OPERAND (expr, 1) = expr_field;
	  }
      }

      t = NULL_TREE;
      break;

    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;

    case POINTER_PLUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (TREE_TYPE (expr),
				    TREE_OPERAND (expr, 0),
				    TREE_OPERAND (expr, 1));
      break;

    case COND_EXPR:
      if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
	{
	  tree op0 = TREE_OPERAND (expr, 0);
	  tree tem;
	  bool set;

	  fold_defer_overflow_warnings ();
	  tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
			     TREE_OPERAND (op0, 0),
			     TREE_OPERAND (op0, 1));
	  /* This is actually a conditional expression, not a GIMPLE
	     conditional statement, however, the valid_gimple_rhs_p
	     test still applies.  */
	  set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
	  fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
	  if (set)
	    COND_EXPR_COND (expr) = tem;
	}
      t = NULL_TREE;
      break;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      /* Preserve volatileness of the original expression.
	 We can end up with a plain decl here which is shared
	 and we shouldn't mess with its flags.  */
      if (!SSA_VAR_P (t))
	TREE_THIS_VOLATILE (t) = volatile_p;
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
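
/* Illustrative example (not part of the original sources): after CCP
   propagates an address, a pair of statements such as

     p_1 = &x;
     y_2 = *p_1;

   leaves an INDIRECT_REF of an ADDR_EXPR, *&x, once p_1 is substituted.
   The INDIRECT_REF case above folds that back into a direct read of x.
   Likewise, a constant ARRAY_REF such as "abc"[1] appearing on a RHS can
   be folded to the character constant 'b'.  */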

/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
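
/* Illustrative example (not part of the original sources): given,
   schematically,

     s_4 = PHI ("hi", "world")

   a query with TYPE == 0 fails because the two string lengths (2 and 5)
   differ, while a query with TYPE == 1, as used for the maximum string
   length of a __strcpy_chk source, walks both PHI arguments and returns
   the larger length, 5.  */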

/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	    fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (is_gimple_cast (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
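
/* Illustrative example (not part of the original sources): given

     t_1 = "hello";
     n_2 = strlen (t_1);

   the BUILT_IN_STRLEN case above can replace the call with the constant 5.
   Similarly, strcpy (dst_3, src_4) with a known string length for src_4 may
   be rewritten by fold_builtin_strcpy into a memcpy of length + 1 bytes.  */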

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try to fold a conditional expression.  */
	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    tree temp = fold (COND_EXPR_COND (rhs));
	    if (temp != COND_EXPR_COND (rhs))
	      result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
				    COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
	  }

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;
	else
	  /* It is possible that fold_stmt_r simplified the RHS.
	     Make sure that the subcode of this statement still
	     reflects the principal operator of the rhs operand.  */
	  return rhs;
      }

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
	else if (CONVERT_EXPR_CODE_P (subcode)
		 && POINTER_TYPE_P (gimple_expr_type (stmt))
		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	  {
	    tree type = gimple_expr_type (stmt);
	    tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
						   integer_zero_node, type);
	    if (t)
	      return t;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
	    {
	      type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
	      if (!useless_type_conversion_p
		    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
		type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	    }

	  result = maybe_fold_stmt_addition (type,
					     gimple_assign_rhs1 (stmt),
					     gimple_assign_rhs2 (stmt));
	}

      if (!result)
	result = fold_binary (subcode,
			      TREE_TYPE (gimple_assign_lhs (stmt)),
			      gimple_assign_rhs1 (stmt),
			      gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt));
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
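
/* Illustrative example (not part of the original sources): once CCP has
   substituted constants, an assignment such as

     x_1 = 3 + 4;

   falls into the GIMPLE_BINARY_RHS case and folds to the constant 7, while
   a pointer addition like p_2 = &a + 4 is first offered to
   maybe_fold_stmt_addition above.  */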

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary (gimple_cond_code (stmt),
			     boolean_type_node,
			     gimple_cond_lhs (stmt),
			     gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
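
/* Illustrative example (not part of the original sources): after constant
   substitution a condition such as

     if (5 > 2)

   folds to a constant true value; the routine above then rewrites the
   GIMPLE_COND in place, and the CFG cleanup performed elsewhere can remove
   the dead arm.  */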

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
	return update_call_from_tree (gsi, result);
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
	 here are when we've propagated the address of a decl into the
	 object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
	 there requires that we create a new CALL_EXPR, and that requires
	 copying EH region info to the new node.  Easier to just do it
	 here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
	  && lang_hooks.fold_obj_type_ref
	  && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND
		     (OBJ_TYPE_REF_OBJECT (callee), 0)))
	{
	  tree t;

	  /* ??? Caution: Broken ADDR_EXPR semantics means that
	     looking at the type of the operand of the addr_expr
	     can yield an array type.  See silly exception in
	     check_pointer_types_r.  */
	  t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
	  t = lang_hooks.fold_obj_type_ref (callee, t);
	  if (t)
	    {
	      gimple_call_set_fn (stmt, t);
	      return true;
	    }
	}
    }

  return false;
}
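
/* Illustrative example (not part of the original sources): in C++ code such
   as

     struct A { virtual int f (); } a;
     int i = a.f ();

   propagating &a into the call leaves an OBJ_TYPE_REF whose object is an
   ADDR_EXPR of a known decl; the code above asks the language hook to
   resolve it, effectively devirtualizing the call to A::f.  */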

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;

  bool changed = false;
  bool inside_addr_expr = false;

  gimple stmt = gsi_stmt (*gsi);

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs != NULL_TREE)
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	stmt = gsi_stmt (*gsi);
	break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;
    case GIMPLE_CALL:
      /* The entire statement may be replaced in this case.  */
      changed |= fold_gimple_call (gsi);
      break;

    default:
      break;
    }

  return changed;
}
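
/* Illustrative example (not part of the original sources): for a statement
   that propagation has turned into

     y_2 = *&x + 3;

   the operand walk above first simplifies *&x to x, and the GIMPLE_ASSIGN
   case then lets fold_gimple_assign try to fold the resulting binary rhs.  */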

/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.  */

bool
fold_stmt_inplace (gimple stmt)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;
  gimple_stmt_iterator si;

  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.

     It appears that, at one time, maybe_fold_stmt_indirect
     would cause the walk to return non-null in order to
     signal that the entire statement should be replaced with
     a call to _builtin_trap.  This functionality is currently
     disabled, as noted in a FIXME, and cannot be supported here.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops;
	tree new_rhs;
	old_num_ops = gimple_num_ops (stmt);
	si = gsi_for_stmt (stmt);
	new_rhs = fold_gimple_assign (&si);
	if (new_rhs != NULL_TREE
	    && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
	  {
	    gimple_assign_set_rhs_from_tree (&si, new_rhs);
	    changed = true;
	  }
	gcc_assert (gsi_stmt (si) == stmt);
	break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    default:
      break;
    }

  return changed;
}
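
/* Illustrative example (not part of the original sources): substituting
   p_1 = &x into y_2 = *p_1 during propagation yields y_2 = *&x; calling
   fold_stmt_inplace on that statement simplifies the operand to y_2 = x
   without ever replacing the statement itself.  */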

/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee, rhs;
  gimple stmt, stack_save;
  gimple_stmt_iterator stack_save_gsi;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	break;
    }

  if (gsi_end_p (i)
      && (! single_succ_p (bb)
	  || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;

  stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
  if (gimple_code (stack_save) != GIMPLE_CALL
      || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
      || stmt_could_throw_p (stack_save)
      || !has_single_use (gimple_call_arg (call, 0)))
    return NULL_TREE;

  callee = gimple_call_fndecl (stack_save);
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || gimple_call_num_args (stack_save) != 0)
    return NULL_TREE;

  stack_save_gsi = gsi_for_stmt (stack_save);
  push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
  rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
  if (!update_call_from_tree (&stack_save_gsi, rhs))
    {
      discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
      return NULL_TREE;
    }
  pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
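
/* Illustrative example (not part of the original sources): for a VLA whose
   scope ends just before the function returns, the final block may look like

     p_1 = __builtin_stack_save ();
     ...                                <no calls or asms>
     __builtin_stack_restore (p_1);
     return;

   Restoring the stack pointer immediately before returning has no observable
   effect, so the code above rewrites the paired __builtin_stack_save into a
   dead constant assignment and returns integer_zero_node so that the restore
   itself is deleted by the caller.  */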

/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
			     1, integer_zero_node);
      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
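
/* Illustrative example (not part of the original sources): on a target whose
   va_list is a plain character pointer, a call

     __builtin_va_copy (&dst, src);

   is returned as the equivalent assignment dst = (char *) src, and
   __builtin_va_end (&ap) becomes integer_zero_node, i.e. a statement with no
   effect that the caller then deletes.  */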

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */

static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }

  if (lhs == NULL_TREE)
    {
      new_stmt = gimple_build_nop ();
      unlink_stmt_vdef (stmt);
      release_defs (stmt);
    }
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
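
/* Illustrative example (not part of the original sources): when a builtin
   call with a used result folds to an expression that is not yet a valid
   GIMPLE rhs, for instance a compound expression produced by the generic
   folders, the helper above gimplifies the replacement into a short
   statement sequence, inserts it before the call, and finally replaces the
   call with either an assignment of the temporary to the original lhs or a
   GIMPLE_NOP when the result is unused.  */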

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = ccp_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  push_stmt_changes (gsi_stmt_ptr (&i));

	  if (!update_call_from_tree (&i, result))
	    gimplify_and_update_call_from_tree (&i, result);

	  stmt = gsi_stmt (i);
	  pop_stmt_changes (gsi_stmt_ptr (&i));

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
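
/* Illustrative example (not part of the original sources): if

     b_1 = __builtin_constant_p (n_2);

   survives to this point with n_2 still not known to be constant, the code
   above resolves it to 0, matching the documented behavior that the builtin
   answers "no" whenever the compiler cannot prove constancy.  */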

struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};