/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.
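
   As a purely illustrative example (not taken from the original
   text), consider the fragment

	x_1 = 4;
	y_2 = x_1 + 5;

   Visiting the first statement moves x_1 from UNDEFINED to CONSTANT 4;
   visiting the second then moves y_2 to CONSTANT 9.  Lattice values
   only ever move in the direction UNINITIALIZED -> UNDEFINED ->
   CONSTANT -> VARYING, which set_lattice_value below asserts.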

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

		if (PRED)
		  a_9 = 3;
		else
		  a_10 = 100;
		a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
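
   As an illustrative (not original) example of that final step, a
   fragment such as

	x_1 = 4;
	y_2 = x_1 + 5;
	if (y_2 > 10)
	  ...

   would have y_2 replaced by 9 and the conditional folded to
   'if (0)', after which CFG cleanup can remove the dead branch.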

   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   Consider, for instance, a fragment that stores the constant '2'
   into field 'a.a' and later tests the predicate 'a.a != B'.  We
   should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally, since we
   also glob partial loads/stores with the base symbol, we also keep
   track of the memory reference where the constant value was stored
   (in the MEM_REF field of PROP_VALUE_T).  So if the VDEF of the
   store 'a.a = 2' is 'a_5', CCP will associate value '2' with 'a_5',
   but it would be wrong to replace a load from 'a.b' with '2',
   because '2' had been stored into a.a, not a.b.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance, a global variable A read and
   conditionally written inside a function foo:

	if (PRED)
	  A_4 = 3;
	# A_5 = PHI (A_4, A_2);
	return A_5;

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
190 #include "coretypes.h"
197 #include "basic-block.h"
200 #include "function.h"
201 #include "diagnostic.h"
203 #include "tree-dump.h"
204 #include "tree-flow.h"
205 #include "tree-pass.h"
206 #include "tree-ssa-propagate.h"
207 #include "value-prof.h"
208 #include "langhooks.h"

/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}

/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && (TREE_READONLY (sym)
	  || TREE_CODE (sym) == CONST_DECL))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  STRIP_USELESS_TYPE_CONVERSION (val);
	  if (is_gimple_min_invariant (val))
	    {
	      if (TREE_CODE (val) == ADDR_EXPR)
		{
		  tree base = get_base_address (TREE_OPERAND (val, 0));
		  if (base && TREE_CODE (base) == VAR_DECL)
		    {
		      TREE_ADDRESSABLE (base) = 1;
		      if (gimple_referenced_vars (cfun))
			add_referenced_var (base);
		    }
		}
	      return val;
	    }
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && !DECL_EXTERNAL (sym)
	  && targetm.binds_local_p (sym)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	val.lattice_val = UNDEFINED;
      else
	val.lattice_val = VARYING;
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
    }

  return val;
}

/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have
   a statement that defines x in terms of y, and we set the value of y
   to NaN.  This causes the value of x to be set to NaN.  When we later
   determine that y is in fact VARYING, fold uses the fact that
   HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */
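
/* Illustrative example (not from the original sources): under
   -ffast-math, where HONOR_SIGNED_ZEROS and HONOR_NANS are false, a
   propagated constant -0.0 is canonicalized to 0.0 and a propagated
   NaN constant is demoted to UNDEFINED, so later meets and folds
   never see -0.0 or NaN lattice constants.  */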

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
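
/* Illustrative examples (not from the original sources): for
   y_2 = x_1 + 4 with x_1 CONSTANT, the likely value is CONSTANT and
   the statement will be handed to ccp_fold; for y_2 = x_1 + 4 with
   x_1 UNDEFINED (its only SSA operand), the likely value is
   UNDEFINED, so the result stays optimistically undefined.  */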
509 likely_value (gimple stmt
)
511 bool has_constant_operand
, has_undefined_operand
, all_undefined_operands
;
516 enum gimple_code code
= gimple_code (stmt
);
518 /* This function appears to be called only for assignments, calls,
519 conditionals, and switches, due to the logic in visit_stmt. */
520 gcc_assert (code
== GIMPLE_ASSIGN
521 || code
== GIMPLE_CALL
522 || code
== GIMPLE_COND
523 || code
== GIMPLE_SWITCH
);
525 /* If the statement has volatile operands, it won't fold to a
527 if (gimple_has_volatile_ops (stmt
))
530 /* Arrive here for more complex cases. */
531 has_constant_operand
= false;
532 has_undefined_operand
= false;
533 all_undefined_operands
= true;
534 FOR_EACH_SSA_TREE_OPERAND (use
, stmt
, iter
, SSA_OP_USE
)
536 prop_value_t
*val
= get_value (use
);
538 if (val
->lattice_val
== UNDEFINED
)
539 has_undefined_operand
= true;
541 all_undefined_operands
= false;
543 if (val
->lattice_val
== CONSTANT
)
544 has_constant_operand
= true;
547 /* There may be constants in regular rhs operands. For calls we
548 have to ignore lhs, fndecl and static chain, otherwise only
550 for (i
= (is_gimple_call (stmt
) ? 2 : 0) + gimple_has_lhs (stmt
);
551 i
< gimple_num_ops (stmt
); ++i
)
553 tree op
= gimple_op (stmt
, i
);
554 if (!op
|| TREE_CODE (op
) == SSA_NAME
)
556 if (is_gimple_min_invariant (op
))
557 has_constant_operand
= true;
560 /* If the operation combines operands like COMPLEX_EXPR make sure to
561 not mark the result UNDEFINED if only one part of the result is
563 if (has_undefined_operand
&& all_undefined_operands
)
565 else if (code
== GIMPLE_ASSIGN
&& has_undefined_operand
)
567 switch (gimple_assign_rhs_code (stmt
))
569 /* Unary operators are handled with all_undefined_operands. */
572 case POINTER_PLUS_EXPR
:
573 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
574 Not bitwise operators, one VARYING operand may specify the
575 result completely. Not logical operators for the same reason.
576 Not COMPLEX_EXPR as one VARYING operand makes the result partly
577 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
578 the undefined operand may be promoted. */
585 /* If there was an UNDEFINED operand but the result may be not UNDEFINED
586 fall back to VARYING even if there were CONSTANT operands. */
587 if (has_undefined_operand
)
590 /* We do not consider virtual operands here -- load from read-only
591 memory may have only VARYING virtual operands, but still be
593 if (has_constant_operand
594 || gimple_references_memory_p (stmt
))

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
605 /* If the statement has operands that we cannot handle, it cannot be
607 if (gimple_has_volatile_ops (stmt
))
610 /* If it is a call and does not return a value or is not a
611 builtin and not an indirect call, it is varying. */
612 if (is_gimple_call (stmt
))
615 if (!gimple_call_lhs (stmt
)
616 || ((fndecl
= gimple_call_fndecl (stmt
)) != NULL_TREE
617 && !DECL_BUILT_IN (fndecl
)))
621 /* Any other store operation is not interesting. */
622 else if (gimple_vdef (stmt
))
625 /* Anything other than assignments and conditional jumps are not
626 interesting for CCP. */
627 if (gimple_code (stmt
) != GIMPLE_ASSIGN
628 && gimple_code (stmt
) != GIMPLE_COND
629 && gimple_code (stmt
) != GIMPLE_SWITCH
630 && gimple_code (stmt
) != GIMPLE_CALL
)

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
643 const_val
= XCNEWVEC (prop_value_t
, num_ssa_names
);
645 /* Initialize simulation flags for PHI nodes and statements. */
648 gimple_stmt_iterator i
;
650 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
652 gimple stmt
= gsi_stmt (i
);
653 bool is_varying
= surely_varying_stmt_p (stmt
);
660 /* If the statement will not produce a constant, mark
661 all its outputs VARYING. */
662 FOR_EACH_SSA_TREE_OPERAND (def
, stmt
, iter
, SSA_OP_ALL_DEFS
)
663 set_value_varying (def
);
665 prop_set_simulate_again (stmt
, !is_varying
);
669 /* Now process PHI nodes. We never clear the simulate_again flag on
670 phi nodes, since we do not know which edges are executable yet,
671 except for phi nodes for virtual operands when we do not do store ccp. */
674 gimple_stmt_iterator i
;
676 for (i
= gsi_start_phis (bb
); !gsi_end_p (i
); gsi_next (&i
))
678 gimple phi
= gsi_stmt (i
);
680 if (!is_gimple_reg (gimple_phi_result (phi
)))
681 prop_set_simulate_again (phi
, false);
683 prop_set_simulate_again (phi
, true);
688 /* Debug count support. Reset the values of ssa names
689 VARYING when the total number ssa names analyzed is
690 beyond the debug count specified. */
696 for (i
= 0; i
< num_ssa_names
; i
++)
700 const_val
[i
].lattice_val
= VARYING
;
701 const_val
[i
].value
= NULL_TREE
;

/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;

  return something_changed;
}

/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */
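
/* Illustrative example (not from the original sources): meeting two
   PHI arguments that are both CONSTANT 4 yields CONSTANT 4; meeting
   CONSTANT 4 with CONSTANT 5 yields VARYING; and meeting anything
   with an UNDEFINED argument leaves the other value unchanged, which
   is what makes the evaluation optimistic.  */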

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      val1->value = val1->value;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}

/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the
   arguments of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
787 prop_value_t
*old_val
, new_val
;
789 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
791 fprintf (dump_file
, "\nVisiting PHI node: ");
792 print_gimple_stmt (dump_file
, phi
, 0, dump_flags
);
795 old_val
= get_value (gimple_phi_result (phi
));
796 switch (old_val
->lattice_val
)
799 return SSA_PROP_VARYING
;
806 new_val
.lattice_val
= UNDEFINED
;
807 new_val
.value
= NULL_TREE
;
814 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
816 /* Compute the meet operator over all the PHI arguments flowing
817 through executable edges. */
818 edge e
= gimple_phi_arg_edge (phi
, i
);
820 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
823 "\n Argument #%d (%d -> %d %sexecutable)\n",
824 i
, e
->src
->index
, e
->dest
->index
,
825 (e
->flags
& EDGE_EXECUTABLE
) ? "" : "not ");
828 /* If the incoming edge is executable, Compute the meet operator for
829 the existing value of the PHI node and the current PHI argument. */
830 if (e
->flags
& EDGE_EXECUTABLE
)
832 tree arg
= gimple_phi_arg (phi
, i
)->def
;
833 prop_value_t arg_val
;
835 if (is_gimple_min_invariant (arg
))
837 arg_val
.lattice_val
= CONSTANT
;
841 arg_val
= *(get_value (arg
));
843 ccp_lattice_meet (&new_val
, &arg_val
);
845 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
847 fprintf (dump_file
, "\t");
848 print_generic_expr (dump_file
, arg
, dump_flags
);
849 dump_lattice_value (dump_file
, "\tValue: ", arg_val
);
850 fprintf (dump_file
, "\n");
853 if (new_val
.lattice_val
== VARYING
)
858 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
860 dump_lattice_value (dump_file
, "\n PHI node value: ", new_val
);
861 fprintf (dump_file
, "\n\n");
864 /* Make the transition to the new value. */
865 if (set_lattice_value (gimple_phi_result (phi
), new_val
))
867 if (new_val
.lattice_val
== VARYING
)
868 return SSA_PROP_VARYING
;
870 return SSA_PROP_INTERESTING
;
873 return SSA_PROP_NOT_INTERESTING
;

/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

static bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
	      && TREE_CODE (addr) == ADDR_EXPR);
885 /* Don't propagate if ADDR's operand has incomplete type. */
886 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr
, 0))))
889 /* If the address is invariant then we do not need to preserve restrict
890 qualifications. But we do need to preserve volatile qualifiers until
891 we can annotate the folded dereference itself properly. */
892 if (is_gimple_min_invariant (addr
)
893 && (!TREE_THIS_VOLATILE (deref
)
894 || TYPE_VOLATILE (TREE_TYPE (addr
))))
895 return useless_type_conversion_p (TREE_TYPE (deref
),
896 TREE_TYPE (TREE_OPERAND (addr
, 0)));
898 /* Else both the address substitution and the folding must result in
899 a valid useless type conversion sequence. */
900 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref
, 0)),
902 && useless_type_conversion_p (TREE_TYPE (deref
),
903 TREE_TYPE (TREE_OPERAND (addr
, 0))));

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
923 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
925 switch (get_gimple_rhs_class (subcode
))
927 case GIMPLE_SINGLE_RHS
:
929 tree rhs
= gimple_assign_rhs1 (stmt
);
930 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
932 if (TREE_CODE (rhs
) == SSA_NAME
)
934 /* If the RHS is an SSA_NAME, return its known constant value,
936 return get_value (rhs
)->value
;
938 /* Handle propagating invariant addresses into address operations.
939 The folding we do here matches that in tree-ssa-forwprop.c. */
940 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
943 base
= &TREE_OPERAND (rhs
, 0);
944 while (handled_component_p (*base
))
945 base
= &TREE_OPERAND (*base
, 0);
946 if (TREE_CODE (*base
) == INDIRECT_REF
947 && TREE_CODE (TREE_OPERAND (*base
, 0)) == SSA_NAME
)
949 prop_value_t
*val
= get_value (TREE_OPERAND (*base
, 0));
950 if (val
->lattice_val
== CONSTANT
951 && TREE_CODE (val
->value
) == ADDR_EXPR
952 && may_propagate_address_into_dereference
955 /* We need to return a new tree, not modify the IL
956 or share parts of it. So play some tricks to
957 avoid manually building it. */
958 tree ret
, save
= *base
;
959 *base
= TREE_OPERAND (val
->value
, 0);
960 ret
= unshare_expr (rhs
);
961 recompute_tree_invariant_for_addr_expr (ret
);
967 else if (TREE_CODE (rhs
) == CONSTRUCTOR
968 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
969 && (CONSTRUCTOR_NELTS (rhs
)
970 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
976 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
978 if (TREE_CODE (val
) == SSA_NAME
979 && get_value (val
)->lattice_val
== CONSTANT
)
980 val
= get_value (val
)->value
;
981 if (TREE_CODE (val
) == INTEGER_CST
982 || TREE_CODE (val
) == REAL_CST
983 || TREE_CODE (val
) == FIXED_CST
)
984 list
= tree_cons (NULL_TREE
, val
, list
);
989 return build_vector (TREE_TYPE (rhs
), nreverse (list
));
992 if (kind
== tcc_reference
)
994 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
995 || TREE_CODE (rhs
) == REALPART_EXPR
996 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
997 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
999 prop_value_t
*val
= get_value (TREE_OPERAND (rhs
, 0));
1000 if (val
->lattice_val
== CONSTANT
)
1001 return fold_unary_loc (EXPR_LOCATION (rhs
),
1003 TREE_TYPE (rhs
), val
->value
);
1005 else if (TREE_CODE (rhs
) == INDIRECT_REF
1006 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
1008 prop_value_t
*val
= get_value (TREE_OPERAND (rhs
, 0));
1009 if (val
->lattice_val
== CONSTANT
1010 && TREE_CODE (val
->value
) == ADDR_EXPR
1011 && useless_type_conversion_p (TREE_TYPE (rhs
),
1012 TREE_TYPE (TREE_TYPE (val
->value
))))
1013 rhs
= TREE_OPERAND (val
->value
, 0);
1015 return fold_const_aggregate_ref (rhs
);
1017 else if (kind
== tcc_declaration
)
1018 return get_symbol_constant_value (rhs
);
1022 case GIMPLE_UNARY_RHS
:
1024 /* Handle unary operators that can appear in GIMPLE form.
1025 Note that we know the single operand must be a constant,
1026 so this should almost always return a simplified RHS. */
1027 tree lhs
= gimple_assign_lhs (stmt
);
1028 tree op0
= gimple_assign_rhs1 (stmt
);
1030 /* Simplify the operand down to a constant. */
1031 if (TREE_CODE (op0
) == SSA_NAME
)
1033 prop_value_t
*val
= get_value (op0
);
1034 if (val
->lattice_val
== CONSTANT
)
1035 op0
= get_value (op0
)->value
;
1038 /* Conversions are useless for CCP purposes if they are
1039 value-preserving. Thus the restrictions that
1040 useless_type_conversion_p places for pointer type conversions
1041 do not apply here. Substitution later will only substitute to
1043 if (CONVERT_EXPR_CODE_P (subcode
)
1044 && POINTER_TYPE_P (TREE_TYPE (lhs
))
1045 && POINTER_TYPE_P (TREE_TYPE (op0
))
1046 /* Do not allow differences in volatile qualification
1047 as this might get us confused as to whether a
1048 propagation destination statement is volatile
1049 or not. See PR36988. */
1050 && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs
)))
1051 == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0
)))))
1054 /* Still try to generate a constant of correct type. */
1055 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
1057 && ((tem
= maybe_fold_offset_to_address
1059 op0
, integer_zero_node
, TREE_TYPE (lhs
)))
1066 fold_unary_ignore_overflow_loc (loc
, subcode
,
1067 gimple_expr_type (stmt
), op0
);
1070 case GIMPLE_BINARY_RHS
:
1072 /* Handle binary operators that can appear in GIMPLE form. */
1073 tree op0
= gimple_assign_rhs1 (stmt
);
1074 tree op1
= gimple_assign_rhs2 (stmt
);
1076 /* Simplify the operands down to constants when appropriate. */
1077 if (TREE_CODE (op0
) == SSA_NAME
)
1079 prop_value_t
*val
= get_value (op0
);
1080 if (val
->lattice_val
== CONSTANT
)
1084 if (TREE_CODE (op1
) == SSA_NAME
)
1086 prop_value_t
*val
= get_value (op1
);
1087 if (val
->lattice_val
== CONSTANT
)
1091 /* Fold &foo + CST into an invariant reference if possible. */
1092 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
1093 && TREE_CODE (op0
) == ADDR_EXPR
1094 && TREE_CODE (op1
) == INTEGER_CST
)
1096 tree tem
= maybe_fold_offset_to_address
1097 (loc
, op0
, op1
, TREE_TYPE (op0
));
1098 if (tem
!= NULL_TREE
)
1102 return fold_binary_loc (loc
, subcode
,
1103 gimple_expr_type (stmt
), op0
, op1
);
1114 tree fn
= gimple_call_fn (stmt
);
1117 if (TREE_CODE (fn
) == SSA_NAME
)
1119 val
= get_value (fn
);
1120 if (val
->lattice_val
== CONSTANT
)
1123 if (TREE_CODE (fn
) == ADDR_EXPR
1124 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
1125 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0)))
1127 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
1130 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1132 args
[i
] = gimple_call_arg (stmt
, i
);
1133 if (TREE_CODE (args
[i
]) == SSA_NAME
)
1135 val
= get_value (args
[i
]);
1136 if (val
->lattice_val
== CONSTANT
)
1137 args
[i
] = val
->value
;
1140 call
= build_call_array_loc (loc
,
1141 gimple_call_return_type (stmt
),
1142 fn
, gimple_call_num_args (stmt
), args
);
1143 retval
= fold_call_expr (EXPR_LOCATION (call
), call
, false);
1145 /* fold_call_expr wraps the result inside a NOP_EXPR. */
1146 STRIP_NOPS (retval
);
1154 /* Handle comparison operators that can appear in GIMPLE form. */
1155 tree op0
= gimple_cond_lhs (stmt
);
1156 tree op1
= gimple_cond_rhs (stmt
);
1157 enum tree_code code
= gimple_cond_code (stmt
);
1159 /* Simplify the operands down to constants when appropriate. */
1160 if (TREE_CODE (op0
) == SSA_NAME
)
1162 prop_value_t
*val
= get_value (op0
);
1163 if (val
->lattice_val
== CONSTANT
)
1167 if (TREE_CODE (op1
) == SSA_NAME
)
1169 prop_value_t
*val
= get_value (op1
);
1170 if (val
->lattice_val
== CONSTANT
)
1174 return fold_binary_loc (loc
, code
, boolean_type_node
, op0
, op1
);
1179 tree rhs
= gimple_switch_index (stmt
);
1181 if (TREE_CODE (rhs
) == SSA_NAME
)
1183 /* If the RHS is an SSA_NAME, return its known constant value,
1185 return get_value (rhs
)->value
;

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
1204 prop_value_t
*value
;
1205 tree base
, ctor
, idx
, field
;
1206 unsigned HOST_WIDE_INT cnt
;
1209 if (TREE_CODE_CLASS (TREE_CODE (t
)) == tcc_declaration
)
1210 return get_symbol_constant_value (t
);
1212 switch (TREE_CODE (t
))
1215 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1216 DECL_INITIAL. If BASE is a nested reference into another
1217 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1218 the inner reference. */
1219 base
= TREE_OPERAND (t
, 0);
1220 switch (TREE_CODE (base
))
1223 if (!TREE_READONLY (base
)
1224 || TREE_CODE (TREE_TYPE (base
)) != ARRAY_TYPE
1225 || !targetm
.binds_local_p (base
))
1228 ctor
= DECL_INITIAL (base
);
1233 ctor
= fold_const_aggregate_ref (base
);
1245 if (ctor
== NULL_TREE
1246 || (TREE_CODE (ctor
) != CONSTRUCTOR
1247 && TREE_CODE (ctor
) != STRING_CST
)
1248 || !TREE_STATIC (ctor
))
1251 /* Get the index. If we have an SSA_NAME, try to resolve it
1252 with the current lattice value for the SSA_NAME. */
1253 idx
= TREE_OPERAND (t
, 1);
1254 switch (TREE_CODE (idx
))
1257 if ((value
= get_value (idx
))
1258 && value
->lattice_val
== CONSTANT
1259 && TREE_CODE (value
->value
) == INTEGER_CST
)
1272 /* Fold read from constant string. */
1273 if (TREE_CODE (ctor
) == STRING_CST
)
1275 if ((TYPE_MODE (TREE_TYPE (t
))
1276 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
1277 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
1279 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
)))) == 1
1280 && compare_tree_int (idx
, TREE_STRING_LENGTH (ctor
)) < 0)
1281 return build_int_cst_type (TREE_TYPE (t
),
1282 (TREE_STRING_POINTER (ctor
)
1283 [TREE_INT_CST_LOW (idx
)]));
1287 /* Whoo-hoo! I'll fold ya baby. Yeah! */
1288 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
1289 if (tree_int_cst_equal (cfield
, idx
))
1291 STRIP_USELESS_TYPE_CONVERSION (cval
);
1292 if (TREE_CODE (cval
) == ADDR_EXPR
)
1294 tree base
= get_base_address (TREE_OPERAND (cval
, 0));
1295 if (base
&& TREE_CODE (base
) == VAR_DECL
)
1296 add_referenced_var (base
);
1303 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1304 DECL_INITIAL. If BASE is a nested reference into another
1305 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1306 the inner reference. */
1307 base
= TREE_OPERAND (t
, 0);
1308 switch (TREE_CODE (base
))
1311 if (!TREE_READONLY (base
)
1312 || TREE_CODE (TREE_TYPE (base
)) != RECORD_TYPE
1313 || !targetm
.binds_local_p (base
))
1316 ctor
= DECL_INITIAL (base
);
1321 ctor
= fold_const_aggregate_ref (base
);
1328 if (ctor
== NULL_TREE
1329 || TREE_CODE (ctor
) != CONSTRUCTOR
1330 || !TREE_STATIC (ctor
))
1333 field
= TREE_OPERAND (t
, 1);
1335 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
1337 /* FIXME: Handle bit-fields. */
1338 && ! DECL_BIT_FIELD (cfield
))
1340 STRIP_USELESS_TYPE_CONVERSION (cval
);
1341 if (TREE_CODE (cval
) == ADDR_EXPR
)
1343 tree base
= get_base_address (TREE_OPERAND (cval
, 0));
1344 if (base
&& TREE_CODE (base
) == VAR_DECL
)
1345 add_referenced_var (base
);
1354 tree c
= fold_const_aggregate_ref (TREE_OPERAND (t
, 0));
1355 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
1356 return fold_build1_loc (EXPR_LOCATION (t
),
1357 TREE_CODE (t
), TREE_TYPE (t
), c
);
1363 tree base
= TREE_OPERAND (t
, 0);
1364 if (TREE_CODE (base
) == SSA_NAME
1365 && (value
= get_value (base
))
1366 && value
->lattice_val
== CONSTANT
1367 && TREE_CODE (value
->value
) == ADDR_EXPR
1368 && useless_type_conversion_p (TREE_TYPE (t
),
1369 TREE_TYPE (TREE_TYPE (value
->value
))))
1370 return fold_const_aggregate_ref (TREE_OPERAND (value
->value
, 0));

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
1388 tree simplified
= NULL_TREE
;
1389 ccp_lattice_t likelyvalue
= likely_value (stmt
);
1392 fold_defer_overflow_warnings ();
1394 /* If the statement is likely to have a CONSTANT result, then try
1395 to fold the statement to determine the constant value. */
1396 /* FIXME. This is the only place that we call ccp_fold.
1397 Since likely_value never returns CONSTANT for calls, we will
1398 not attempt to fold them, including builtins that may profit. */
1399 if (likelyvalue
== CONSTANT
)
1400 simplified
= ccp_fold (stmt
);
1401 /* If the statement is likely to have a VARYING result, then do not
1402 bother folding the statement. */
1403 else if (likelyvalue
== VARYING
)
1405 enum gimple_code code
= gimple_code (stmt
);
1406 if (code
== GIMPLE_ASSIGN
)
1408 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
1410 /* Other cases cannot satisfy is_gimple_min_invariant
1412 if (get_gimple_rhs_class (subcode
) == GIMPLE_SINGLE_RHS
)
1413 simplified
= gimple_assign_rhs1 (stmt
);
1415 else if (code
== GIMPLE_SWITCH
)
1416 simplified
= gimple_switch_index (stmt
);
1418 /* These cannot satisfy is_gimple_min_invariant without folding. */
1419 gcc_assert (code
== GIMPLE_CALL
|| code
== GIMPLE_COND
);
1422 is_constant
= simplified
&& is_gimple_min_invariant (simplified
);
1424 fold_undefer_overflow_warnings (is_constant
, stmt
, 0);
1426 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1428 fprintf (dump_file
, "which is likely ");
1429 switch (likelyvalue
)
1432 fprintf (dump_file
, "CONSTANT");
1435 fprintf (dump_file
, "UNDEFINED");
1438 fprintf (dump_file
, "VARYING");
1442 fprintf (dump_file
, "\n");
1447 /* The statement produced a constant value. */
1448 val
.lattice_val
= CONSTANT
;
1449 val
.value
= simplified
;
1453 /* The statement produced a nonconstant value. If the statement
1454 had UNDEFINED operands, then the result of the statement
1455 should be UNDEFINED. Otherwise, the statement is VARYING. */
1456 if (likelyvalue
== UNDEFINED
)
1457 val
.lattice_val
= likelyvalue
;
1459 val
.lattice_val
= VARYING
;
1461 val
.value
= NULL_TREE
;

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
1478 enum ssa_prop_result retval
;
1480 tree lhs
= gimple_get_lhs (stmt
);
1482 gcc_assert (gimple_code (stmt
) != GIMPLE_CALL
1483 || gimple_call_lhs (stmt
) != NULL_TREE
);
1485 if (gimple_assign_copy_p (stmt
))
1487 tree rhs
= gimple_assign_rhs1 (stmt
);
1489 if (TREE_CODE (rhs
) == SSA_NAME
)
1491 /* For a simple copy operation, we copy the lattice values. */
1492 prop_value_t
*nval
= get_value (rhs
);
1496 val
= evaluate_stmt (stmt
);
1499 /* Evaluate the statement, which could be
1500 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1501 val
= evaluate_stmt (stmt
);
1503 retval
= SSA_PROP_NOT_INTERESTING
;
1505 /* Set the lattice value of the statement's output. */
1506 if (TREE_CODE (lhs
) == SSA_NAME
)
1508 /* If STMT is an assignment to an SSA_NAME, we only have one
1510 if (set_lattice_value (lhs
, val
))
1513 if (val
.lattice_val
== VARYING
)
1514 retval
= SSA_PROP_VARYING
;
1516 retval
= SSA_PROP_INTERESTING
;

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
1534 block
= gimple_bb (stmt
);
1535 val
= evaluate_stmt (stmt
);
1537 /* Find which edge out of the conditional block will be taken and add it
1538 to the worklist. If no single edge can be determined statically,
1539 return SSA_PROP_VARYING to feed all the outgoing edges to the
1540 propagation engine. */
1541 *taken_edge_p
= val
.value
? find_taken_edge (block
, val
.value
) : 0;
1543 return SSA_PROP_INTERESTING
;
1545 return SSA_PROP_VARYING
;

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
1564 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1566 fprintf (dump_file
, "\nVisiting statement:\n");
1567 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
1570 switch (gimple_code (stmt
))
1573 /* If the statement is an assignment that produces a single
1574 output value, evaluate its RHS to see if the lattice value of
1575 its output has changed. */
1576 return visit_assignment (stmt
, output_p
);
1579 /* A value-returning call also performs an assignment. */
1580 if (gimple_call_lhs (stmt
) != NULL_TREE
)
1581 return visit_assignment (stmt
, output_p
);
1586 /* If STMT is a conditional branch, see if we can determine
1587 which branch will be taken. */
1588 /* FIXME. It appears that we should be able to optimize
1589 computed GOTOs here as well. */
1590 return visit_cond_stmt (stmt
, taken_edge_p
);
1596 /* Any other kind of statement is not interesting for constant
1597 propagation and, therefore, not worth simulating. */
1598 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1599 fprintf (dump_file
, "No interesting values produced. Marked VARYING.\n");
1601 /* Definitions made by statements other than assignments to
1602 SSA_NAMEs represent unknown modifications to their outputs.
1603 Mark them VARYING. */
1604 FOR_EACH_SSA_TREE_OPERAND (def
, stmt
, iter
, SSA_OP_ALL_DEFS
)
1606 prop_value_t v
= { VARYING
, NULL_TREE
};
1607 set_lattice_value (def
, v
);
1610 return SSA_PROP_VARYING
;

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}

struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect	/* todo_flags_finish */
 }
};

/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.

   LOC is the location of the original expression.  */
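
/* Illustrative example (not from the original sources): for
   "int a[10];", an access written as *(&a[0] + 8) can, on a target
   where sizeof (int) == 4, be folded to a[2]; the byte offset 8 is
   divided by the element size to recover the index.  */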
1663 maybe_fold_offset_to_array_ref (location_t loc
, tree base
, tree offset
,
1665 bool allow_negative_idx
)
1667 tree min_idx
, idx
, idx_type
, elt_offset
= integer_zero_node
;
1668 tree array_type
, elt_type
, elt_size
;
1671 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
1672 measured in units of the size of elements type) from that ARRAY_REF).
1673 We can't do anything if either is variable.
1675 The case we handle here is *(&A[N]+O). */
1676 if (TREE_CODE (base
) == ARRAY_REF
)
1678 tree low_bound
= array_ref_low_bound (base
);
1680 elt_offset
= TREE_OPERAND (base
, 1);
1681 if (TREE_CODE (low_bound
) != INTEGER_CST
1682 || TREE_CODE (elt_offset
) != INTEGER_CST
)
1685 elt_offset
= int_const_binop (MINUS_EXPR
, elt_offset
, low_bound
, 0);
1686 base
= TREE_OPERAND (base
, 0);
1689 /* Ignore stupid user tricks of indexing non-array variables. */
1690 array_type
= TREE_TYPE (base
);
1691 if (TREE_CODE (array_type
) != ARRAY_TYPE
)
1693 elt_type
= TREE_TYPE (array_type
);
1694 if (!useless_type_conversion_p (orig_type
, elt_type
))
1697 /* Use signed size type for intermediate computation on the index. */
1698 idx_type
= signed_type_for (size_type_node
);
1700 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1701 element type (so we can use the alignment if it's not constant).
1702 Otherwise, compute the offset as an index by using a division. If the
1703 division isn't exact, then don't do anything. */
1704 elt_size
= TYPE_SIZE_UNIT (elt_type
);
1707 if (integer_zerop (offset
))
1709 if (TREE_CODE (elt_size
) != INTEGER_CST
)
1710 elt_size
= size_int (TYPE_ALIGN (elt_type
));
1712 idx
= build_int_cst (idx_type
, 0);
1716 unsigned HOST_WIDE_INT lquo
, lrem
;
1717 HOST_WIDE_INT hquo
, hrem
;
1720 /* The final array offset should be signed, so we need
1721 to sign-extend the (possibly pointer) offset here
1722 and use signed division. */
1723 soffset
= double_int_sext (tree_to_double_int (offset
),
1724 TYPE_PRECISION (TREE_TYPE (offset
)));
1725 if (TREE_CODE (elt_size
) != INTEGER_CST
1726 || div_and_round_double (TRUNC_DIV_EXPR
, 0,
1727 soffset
.low
, soffset
.high
,
1728 TREE_INT_CST_LOW (elt_size
),
1729 TREE_INT_CST_HIGH (elt_size
),
1730 &lquo
, &hquo
, &lrem
, &hrem
)
1734 idx
= build_int_cst_wide (idx_type
, lquo
, hquo
);
1737 /* Assume the low bound is zero. If there is a domain type, get the
1738 low bound, if any, convert the index into that type, and add the
1740 min_idx
= build_int_cst (idx_type
, 0);
1741 domain_type
= TYPE_DOMAIN (array_type
);
1744 idx_type
= domain_type
;
1745 if (TYPE_MIN_VALUE (idx_type
))
1746 min_idx
= TYPE_MIN_VALUE (idx_type
);
1748 min_idx
= fold_convert (idx_type
, min_idx
);
1750 if (TREE_CODE (min_idx
) != INTEGER_CST
)
1753 elt_offset
= fold_convert (idx_type
, elt_offset
);
1756 if (!integer_zerop (min_idx
))
1757 idx
= int_const_binop (PLUS_EXPR
, idx
, min_idx
, 0);
1758 if (!integer_zerop (elt_offset
))
1759 idx
= int_const_binop (PLUS_EXPR
, idx
, elt_offset
, 0);
1761 /* Make sure to possibly truncate late after offsetting. */
1762 idx
= fold_convert (idx_type
, idx
);
1764 /* We don't want to construct access past array bounds. For example
1767 should not be simplified into (*c)[14] or tree-vrp will
1768 give false warnings. The same is true for
1769 struct A { long x; char d[0]; } *a;
1771 which should be not folded to &a->d[-8]. */
1773 && TYPE_MAX_VALUE (domain_type
)
1774 && TREE_CODE (TYPE_MAX_VALUE (domain_type
)) == INTEGER_CST
)
1776 tree up_bound
= TYPE_MAX_VALUE (domain_type
);
1778 if (tree_int_cst_lt (up_bound
, idx
)
1779 /* Accesses after the end of arrays of size 0 (gcc
1780 extension) and 1 are likely intentional ("struct
1782 && compare_tree_int (up_bound
, 1) > 0)
1786 && TYPE_MIN_VALUE (domain_type
))
1788 if (!allow_negative_idx
1789 && TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
1790 && tree_int_cst_lt (idx
, TYPE_MIN_VALUE (domain_type
)))
1793 else if (!allow_negative_idx
1794 && compare_tree_int (idx
, 0) < 0)
1798 tree t
= build4 (ARRAY_REF
, elt_type
, base
, idx
, NULL_TREE
, NULL_TREE
);
1799 SET_EXPR_LOCATION (t
, loc
);

/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.

   LOC is the location of the original expression.  */
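
/* Illustrative example (not from the original sources): for
   "struct S { int a; int b; } s;", a dereference written as
   *(int *)((char *)&s + 4) can be folded to s.b when field b sits at
   byte offset 4, by walking the FIELD_DECLs and matching the
   displacement against each field's byte_position.  */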
1812 maybe_fold_offset_to_component_ref (location_t loc
, tree record_type
,
1813 tree base
, tree offset
,
1814 tree orig_type
, bool base_is_ptr
)
1816 tree f
, t
, field_type
, tail_array_field
, field_offset
;
1820 if (TREE_CODE (record_type
) != RECORD_TYPE
1821 && TREE_CODE (record_type
) != UNION_TYPE
1822 && TREE_CODE (record_type
) != QUAL_UNION_TYPE
)
1825 /* Short-circuit silly cases. */
1826 if (useless_type_conversion_p (record_type
, orig_type
))
1829 tail_array_field
= NULL_TREE
;
1830 for (f
= TYPE_FIELDS (record_type
); f
; f
= TREE_CHAIN (f
))
1834 if (TREE_CODE (f
) != FIELD_DECL
)
1836 if (DECL_BIT_FIELD (f
))
1839 if (!DECL_FIELD_OFFSET (f
))
1841 field_offset
= byte_position (f
);
1842 if (TREE_CODE (field_offset
) != INTEGER_CST
)
1845 /* ??? Java creates "interesting" fields for representing base classes.
1846 They have no name, and have no context. With no context, we get into
1847 trouble with nonoverlapping_component_refs_p. Skip them. */
1848 if (!DECL_FIELD_CONTEXT (f
))
1851 /* The previous array field isn't at the end. */
1852 tail_array_field
= NULL_TREE
;
1854 /* Check to see if this offset overlaps with the field. */
1855 cmp
= tree_int_cst_compare (field_offset
, offset
);
1859 field_type
= TREE_TYPE (f
);
1861 /* Here we exactly match the offset being checked. If the types match,
1862 then we can return that field. */
1864 && useless_type_conversion_p (orig_type
, field_type
))
1867 base
= build1 (INDIRECT_REF
, record_type
, base
);
1868 t
= build3 (COMPONENT_REF
, field_type
, base
, f
, NULL_TREE
);
1872 /* Don't care about offsets into the middle of scalars. */
1873 if (!AGGREGATE_TYPE_P (field_type
))
1876 /* Check for array at the end of the struct. This is often
1877 used as for flexible array members. We should be able to
1878 turn this into an array access anyway. */
1879 if (TREE_CODE (field_type
) == ARRAY_TYPE
)
1880 tail_array_field
= f
;
1882 /* Check the end of the field against the offset. */
1883 if (!DECL_SIZE_UNIT (f
)
1884 || TREE_CODE (DECL_SIZE_UNIT (f
)) != INTEGER_CST
)
1886 t
= int_const_binop (MINUS_EXPR
, offset
, field_offset
, 1);
1887 if (!tree_int_cst_lt (t
, DECL_SIZE_UNIT (f
)))
1890 /* If we matched, then set offset to the displacement into
1893 new_base
= build1 (INDIRECT_REF
, record_type
, base
);
1896 protected_set_expr_location (new_base
, loc
);
1897 new_base
= build3 (COMPONENT_REF
, field_type
, new_base
, f
, NULL_TREE
);
1898 protected_set_expr_location (new_base
, loc
);
1900 /* Recurse to possibly find the match. */
1901 ret
= maybe_fold_offset_to_array_ref (loc
, new_base
, t
, orig_type
,
1902 f
== TYPE_FIELDS (record_type
));
1905 ret
= maybe_fold_offset_to_component_ref (loc
, field_type
, new_base
, t
,
1911 if (!tail_array_field
)
1914 f
= tail_array_field
;
1915 field_type
= TREE_TYPE (f
);
1916 offset
= int_const_binop (MINUS_EXPR
, offset
, byte_position (f
), 1);
1918 /* If we get here, we've got an aggregate field, and a possibly
1919 nonzero offset into them. Recurse and hope for a valid match. */
1922 base
= build1 (INDIRECT_REF
, record_type
, base
);
1923 SET_EXPR_LOCATION (base
, loc
);
1925 base
= build3 (COMPONENT_REF
, field_type
, base
, f
, NULL_TREE
);
1926 SET_EXPR_LOCATION (base
, loc
);
1928 t
= maybe_fold_offset_to_array_ref (loc
, base
, offset
, orig_type
,
1929 f
== TYPE_FIELDS (record_type
));
1932 return maybe_fold_offset_to_component_ref (loc
, field_type
, base
, offset
,

/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   LOC is the location of original expression.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */
1945 maybe_fold_offset_to_reference (location_t loc
, tree base
, tree offset
,
1950 bool base_is_ptr
= true;
1953 if (TREE_CODE (base
) == ADDR_EXPR
)
1955 base_is_ptr
= false;
1957 base
= TREE_OPERAND (base
, 0);
1959 /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
1960 so it needs to be removed and new COMPONENT_REF constructed.
1961 The wrong COMPONENT_REF are often constructed by folding the
1962 (type *)&object within the expression (type *)&object+offset */
1963 if (handled_component_p (base
))
1965 HOST_WIDE_INT sub_offset
, size
, maxsize
;
1967 newbase
= get_ref_base_and_extent (base
, &sub_offset
,
1969 gcc_assert (newbase
);
1972 && !(sub_offset
& (BITS_PER_UNIT
- 1)))
1976 offset
= int_const_binop (PLUS_EXPR
, offset
,
1977 build_int_cst (TREE_TYPE (offset
),
1978 sub_offset
/ BITS_PER_UNIT
), 1);
1981 if (useless_type_conversion_p (orig_type
, TREE_TYPE (base
))
1982 && integer_zerop (offset
))
1984 type
= TREE_TYPE (base
);
1989 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
1991 type
= TREE_TYPE (TREE_TYPE (base
));
1993 ret
= maybe_fold_offset_to_component_ref (loc
, type
, base
, offset
,
1994 orig_type
, base_is_ptr
);
1999 base
= build1 (INDIRECT_REF
, type
, base
);
2000 SET_EXPR_LOCATION (base
, loc
);
2002 ret
= maybe_fold_offset_to_array_ref (loc
,
2003 base
, offset
, orig_type
, true);

/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by combination of those.

   LOC is the location of the original expression.

   Before attempting the conversion strip off existing component refs.  */
2016 maybe_fold_offset_to_address (location_t loc
, tree addr
, tree offset
,
2021 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr
))
2022 && POINTER_TYPE_P (orig_type
));
2024 t
= maybe_fold_offset_to_reference (loc
, addr
, offset
,
2025 TREE_TYPE (orig_type
));
2031 /* For __builtin_object_size to function correctly we need to
2032 make sure not to fold address arithmetic so that we change
2033 reference from one array to another. This would happen for
2036 struct X { char s1[10]; char s2[10] } s;
2037 char *foo (void) { return &s.s2[-4]; }
2039 where we need to avoid generating &s.s1[6]. As the C and
2040 C++ frontends create different initial trees
2041 (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
2042 sophisticated comparisons here. Note that checking for the
2043 condition after the fact is easier than trying to avoid doing
2046 if (TREE_CODE (orig
) == ADDR_EXPR
)
2047 orig
= TREE_OPERAND (orig
, 0);
2048 if ((TREE_CODE (orig
) == ARRAY_REF
2049 || (TREE_CODE (orig
) == COMPONENT_REF
2050 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig
, 1))) == ARRAY_TYPE
))
2051 && (TREE_CODE (t
) == ARRAY_REF
2052 || TREE_CODE (t
) == COMPONENT_REF
)
2053 && !operand_equal_p (TREE_CODE (orig
) == ARRAY_REF
2054 ? TREE_OPERAND (orig
, 0) : orig
,
2055 TREE_CODE (t
) == ARRAY_REF
2056 ? TREE_OPERAND (t
, 0) : t
, 0))
2059 ptr_type
= build_pointer_type (TREE_TYPE (t
));
2060 if (!useless_type_conversion_p (orig_type
, ptr_type
))
2062 return build_fold_addr_expr_with_type_loc (loc
, t
, ptr_type
);

/* A subroutine of fold_stmt.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */
2072 maybe_fold_stmt_indirect (tree expr
, tree base
, tree offset
)
2075 bool volatile_p
= TREE_THIS_VOLATILE (expr
);
2076 location_t loc
= EXPR_LOCATION (expr
);
2078 /* We may well have constructed a double-nested PLUS_EXPR via multiple
2079 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
2080 are sometimes added. */
2082 STRIP_TYPE_NOPS (base
);
2083 TREE_OPERAND (expr
, 0) = base
;
2085 /* One possibility is that the address reduces to a string constant. */
2086 t
= fold_read_from_constant_string (expr
);
2090 /* Add in any offset from a POINTER_PLUS_EXPR. */
2091 if (TREE_CODE (base
) == POINTER_PLUS_EXPR
)
2095 offset2
= TREE_OPERAND (base
, 1);
2096 if (TREE_CODE (offset2
) != INTEGER_CST
)
2098 base
= TREE_OPERAND (base
, 0);
2100 offset
= fold_convert (sizetype
,
2101 int_const_binop (PLUS_EXPR
, offset
, offset2
, 1));
2104 if (TREE_CODE (base
) == ADDR_EXPR
)
2106 tree base_addr
= base
;
2108 /* Strip the ADDR_EXPR. */
2109 base
= TREE_OPERAND (base
, 0);
2111 /* Fold away CONST_DECL to its value, if the type is scalar. */
2112 if (TREE_CODE (base
) == CONST_DECL
2113 && is_gimple_min_invariant (DECL_INITIAL (base
)))
2114 return DECL_INITIAL (base
);
2116 /* Try folding *(&B+O) to B.X. */
2117 t
= maybe_fold_offset_to_reference (loc
, base_addr
, offset
,
2121 /* Preserve volatileness of the original expression.
2122 We can end up with a plain decl here which is shared
2123 and we shouldn't mess with its flags. */
2125 TREE_THIS_VOLATILE (t
) = volatile_p
;
2131 /* We can get here for out-of-range string constant accesses,
2132 such as "_"[3]. Bail out of the entire substitution search
2133 and arrange for the entire statement to be replaced by a
2134 call to __builtin_trap. In all likelihood this will all be
2135 constant-folded away, but in the meantime we can't leave with
2136 something that get_expr_operands can't understand. */
2140 if (TREE_CODE (t
) == ADDR_EXPR
2141 && TREE_CODE (TREE_OPERAND (t
, 0)) == STRING_CST
)
2143 /* FIXME: Except that this causes problems elsewhere with dead
2144 code not being deleted, and we die in the rtl expanders
2145 because we failed to remove some ssa_name. In the meantime,
2146 just return zero. */
2147 /* FIXME2: This condition should be signaled by
2148 fold_read_from_constant_string directly, rather than
2149 re-checking for it here. */
2150 return integer_zero_node
;
2153 /* Try folding *(B+O) to B->X. Still an improvement. */
2154 if (POINTER_TYPE_P (TREE_TYPE (base
)))
2156 t
= maybe_fold_offset_to_reference (loc
, base
, offset
,
2163 /* Otherwise we had an offset that we could not simplify. */

/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */
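
/* Illustrative example (not from the original sources): given
   "int a[8];", a POINTER_PLUS_EXPR computing &a p+ 8 can be rewritten
   as &a[2] when sizeof (int) == 4, which later dereferences and
   propagation can then simplify further.  */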
2181 maybe_fold_stmt_addition (location_t loc
, tree res_type
, tree op0
, tree op1
)
2186 /* The first operand should be an ADDR_EXPR. */
2187 if (TREE_CODE (op0
) != ADDR_EXPR
)
2189 op0
= TREE_OPERAND (op0
, 0);
  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
	 via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (op0, 1))
	  && TREE_CODE (op1) == SSA_NAME
	  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  if (!is_gimple_assign (offset_def))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
				     TYPE_SIZE_UNIT (TREE_TYPE (op0))))
	    return build1 (ADDR_EXPR, res_type,
			   build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   gimple_assign_rhs1 (offset_def),
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	  else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build1 (ADDR_EXPR, res_type,
			   build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   op1,
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	}
      return NULL_TREE;
    }
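
  /* Illustrative example (hypothetical GIMPLE): for "int a[16];" the
     sequence

	 off_2 = i_1 * 4;
	 p_3 = &a[0] + off_2;

     matches the non-constant-offset case above because the multiplier
     equals the element size, so the POINTER_PLUS_EXPR can be rewritten
     as  p_3 = &a[i_1],  assuming 4-byte int.  */
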
  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR, array_idx, op1, 0);
      op0 = array_obj;
    }
  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1,
					    ptd_type);
  if (t)
    {
      t = build1 (ADDR_EXPR, res_type, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
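
/* Illustrative example (hypothetical): with "int a[8];" an address
   computation of the form

       p_1 = (int *) (&a + 8);

   has the (T *)(&array + const) shape described above and folds to
   p_1 = &a[2],  assuming 4-byte int.  */
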
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;

  if (TREE_CODE (expr) == ARRAY_REF
      && !is_lhs)
    {
      tree tem = fold_read_from_constant_string (expr);
      if (tem)
	return tem;
    }

  /* ??? We might want to open-code the relevant remaining cases
     to avoid using the generic fold.  */
  if (handled_component_p (*t)
      && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
    {
      tree tem = fold (*t);
      if (tem != *t)
	return tem;
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);
  if (TREE_CODE (*t) == INDIRECT_REF)
    {
      tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
					   integer_zero_node);
      /* Avoid folding *"abc" = 5 into 'a' = 5.  */
      if (is_lhs && tem && CONSTANT_CLASS_P (tem))
	tem = NULL_TREE;
      if (!tem
	  && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
	/* If we had a good reason for propagating the address here,
	   make sure we end up with valid gimple.  See PR34989.  */
	tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }
  else if (!is_lhs
	   && DECL_P (*t))
    {
      tree tem = get_symbol_constant_value (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
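
/* Illustrative example (hypothetical): once CCP has propagated  p_1 = &x
   into its uses, a statement such as

       tmp_2 = *p_1;

   arrives here as  tmp_2 = *&x  and is simplified to  tmp_2 = x;  a read
   such as  c_3 = "abc"[1]  can likewise be folded to the character
   constant 'b' through fold_read_from_constant_string.  */
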
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;
  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}
      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }
  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      break;
    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     there is nothing more we can do.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;
  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;
  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we can not use the result.  */
	  if (is_gimple_val (new_val)
	      || (is_gimple_cast (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;
    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;
    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);

  return result;
}
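
/* Illustrative example (hypothetical): when the dataflow above has
   determined that the argument of a call is the string "hello", a call

       n_2 = __builtin_strlen (s_1);

   folds to  n_2 = 5;  similarly, a strcpy whose source length is known
   may be rewritten as a call to memcpy, as the comment before this
   function notes.  */
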
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;
  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try to fold a conditional expression.  */
	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    tree op0 = COND_EXPR_COND (rhs);
	    tree tem;
	    bool set = false;
	    location_t cond_loc = EXPR_LOCATION (rhs);

	    if (COMPARISON_CLASS_P (op0))
	      {
		fold_defer_overflow_warnings ();
		tem = fold_binary_loc (cond_loc,
				       TREE_CODE (op0), TREE_TYPE (op0),
				       TREE_OPERAND (op0, 0),
				       TREE_OPERAND (op0, 1));
		/* This is actually a conditional expression, not a GIMPLE
		   conditional statement, however, the valid_gimple_rhs_p
		   test still applies.  */
		set = (tem && is_gimple_condexpr (tem)
		       && valid_gimple_rhs_p (tem));
		fold_undefer_overflow_warnings (set, stmt, 0);
	      }
	    else if (is_gimple_min_invariant (op0))
	      {
		tem = op0;
		set = true;
	      }
	    else
	      return NULL_TREE;

	    if (set)
	      result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
					COND_EXPR_THEN (rhs),
					COND_EXPR_ELSE (rhs));
	  }
	else if (TREE_CODE (rhs) == TARGET_MEM_REF)
	  return maybe_fold_tmr (rhs);

	else if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
	    if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
	else if (CONVERT_EXPR_CODE_P (subcode)
		 && POINTER_TYPE_P (gimple_expr_type (stmt))
		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	  {
	    tree type = gimple_expr_type (stmt);
	    tree t = maybe_fold_offset_to_address (loc,
						   gimple_assign_rhs1 (stmt),
						   integer_zero_node, type);
	    if (t)
	      return t;
	  }
      }
      break;
:
2815 /* Try to fold pointer addition. */
2816 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
2818 tree type
= TREE_TYPE (gimple_assign_rhs1 (stmt
));
2819 if (TREE_CODE (TREE_TYPE (type
)) == ARRAY_TYPE
)
2821 type
= build_pointer_type (TREE_TYPE (TREE_TYPE (type
)));
2822 if (!useless_type_conversion_p
2823 (TREE_TYPE (gimple_assign_lhs (stmt
)), type
))
2824 type
= TREE_TYPE (gimple_assign_rhs1 (stmt
));
2826 result
= maybe_fold_stmt_addition (gimple_location (stmt
),
2828 gimple_assign_rhs1 (stmt
),
2829 gimple_assign_rhs2 (stmt
));
      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt));
	}
      break;
    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
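
/* Illustrative example (hypothetical): after constant propagation an
   assignment such as

       x_2 = 3 + 4;

   reaches the GIMPLE_BINARY_RHS case above and folds to the replacement
   RHS 7, which the caller installs with
   gimple_assign_set_rhs_from_tree.  */
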
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
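
/* Illustrative example (hypothetical): if propagation has substituted
   constants into a condition, e.g.

       if (0 != 1)

   fold_binary_loc reduces the predicate to a constant and the
   GIMPLE_COND is rewritten in place to the equivalent trivial
   condition.  */
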
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
	return update_call_from_tree (gsi, result);
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
	 here are when we've propagated the address of a decl into the
	 object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
	 there requires that we create a new CALL_EXPR, and that requires
	 copying EH region info to the new node.  Easier to just do it
	 here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
	  && lang_hooks.fold_obj_type_ref
	  && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND
		     (OBJ_TYPE_REF_OBJECT (callee), 0)))
	{
	  tree t;

	  /* ??? Caution: Broken ADDR_EXPR semantics means that
	     looking at the type of the operand of the addr_expr
	     can yield an array type.  See silly exception in
	     check_pointer_types_r.  */
	  t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
	  t = lang_hooks.fold_obj_type_ref (callee, t);
	  if (t)
	    {
	      gimple_call_set_fn (stmt, t);
	      return true;
	    }
	}
    }

  return false;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs != NULL_TREE
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;
    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
	  {
	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	    if (tmp)
	      {
		gimple_call_set_arg (stmt, i, tmp);
		changed = true;
	      }
	  }
      /* The entire statement may be replaced in this case.  */
      if (!inplace)
	changed |= fold_gimple_call (gsi);
      break;
    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	{
	  tree link = gimple_asm_output_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	{
	  tree link = gimple_asm_input_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      break;

    default:;
    }
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  gcc_assert (gsi_stmt (gsi) == stmt);
  return changed;
}
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee, rhs;
  gimple stmt, stack_save;
  gimple_stmt_iterator stack_save_gsi;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;
  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	break;
    }

  if (gsi_end_p (i)
      && (! single_succ_p (bb)
	  || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;
  stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
  if (gimple_code (stack_save) != GIMPLE_CALL
      || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
      || stmt_could_throw_p (stack_save)
      || !has_single_use (gimple_call_arg (call, 0)))
    return NULL_TREE;

  callee = gimple_call_fndecl (stack_save);
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || gimple_call_num_args (stack_save) != 0)
    return NULL_TREE;

  stack_save_gsi = gsi_for_stmt (stack_save);
  rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
  if (!update_call_from_tree (&stack_save_gsi, rhs))
    return NULL_TREE;

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
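
/* Illustrative example (hypothetical GIMPLE): in a block such as

       p_1 = __builtin_stack_save ();
       ...				(no calls or asm statements)
       __builtin_stack_restore (p_1);

   that falls through to the function exit, the restore has no visible
   effect: the stack-save result is rewritten to a constant here and the
   restore statement is deleted by the caller.  */
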
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
				 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
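
/* Illustrative example (hypothetical): on a target whose va_list is a
   plain character pointer, the calls

       __builtin_va_start (&ap, 0);
       __builtin_va_copy (&aq, ap);
       __builtin_va_end (&ap);

   are rewritten here to  ap = __builtin_next_arg (0),  aq = ap,  and a
   deleted no-op, respectively, as described in the comment before this
   function.  */
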
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */

static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);
  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));
  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }
  if (lhs == NULL_TREE)
    {
      new_stmt = gimple_build_nop ();
      unlink_stmt_vdef (stmt);
      release_defs (stmt);
    }
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
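
/* Illustrative example (hypothetical): when a folded builtin produces an
   expression that is not a valid GIMPLE RHS, that expression is
   gimplified into a short sequence of new statements inserted before the
   call, and the original GIMPLE_CALL is then replaced by either
   lhs = tmp  or a GIMPLE_NOP when the result is unused.  */
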
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = ccp_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_update_address_taken;
	    }
	  stmt = gsi_stmt (i);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }
  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};