/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

        UNINITIALIZED   ->  the initial state of the value.  This value
                            is replaced with a correct initial value
                            the first time the value is used, so the
                            rest of the pass does not need to care about
                            it.  Using this value simplifies initialization
                            of the pass, and prevents us from needlessly
                            scanning statements that are never reached.

        UNDEFINED       ->  V_i is a local variable whose definition
                            has not been processed yet.  Therefore we
                            don't yet know if its value is a constant
                            or not.

        CONSTANT        ->  V_i has been found to hold a constant
                            value C.

        VARYING         ->  V_i cannot take a constant value, or if it
                            does, it is not possible to determine it
                            at compile time.
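
   For instance (an illustrative fragment, not from a real test case),
   given

        x_1 = 10;
        y_2 = x_1 + 5;
        z_3 = foo ();

   simulation leaves x_1 and y_2 CONSTANT (10 and 15) and z_3 VARYING,
   since nothing is known about the call's return value.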
   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.
   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

                if (PRED)
                  a_9 = 3;
                else
                  a_10 = 100;
                a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.
      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
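
   As a minimal end-to-end sketch (hypothetical input, shown only for
   illustration):

        i_1 = 4;                        i_1 = 4;
        j_2 = i_1 + 2;     becomes      j_2 = 6;
        return j_2 * 2;                 return 12;

   with the now-dead assignments left for DCE to clean up.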
   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

          struct A a;
          const int B = 42;

          void foo (int i)
          {
            if (i > 10)
              a.a = 42;
            else
              {
                a.b = 21;
                a.a = a.b + 21;
              }

            if (a.a != B)
              never_executed ();
          }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

          # a_5 = VDEF <a_4>
          a.a = 2;

          # VUSE <a_5>
          x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

        int A;

        foo (int i)
        {
          if (i_3 > 10)
            A_4 = 3;
          # A_5 = PHI (A_4, A_2);

          # VUSE <A_5>
          A.0_6 = A;

          return A.0_6;
        }

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   that produced the VDEF).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}
/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  STRIP_USELESS_TYPE_CONVERSION (val);
	  if (is_gimple_min_invariant (val))
	    {
	      if (TREE_CODE (val) == ADDR_EXPR)
		{
		  tree base = get_base_address (TREE_OPERAND (val, 0));
		  if (base && TREE_CODE (base) == VAR_DECL)
		    add_referenced_var (base);
		}
	      return val;
	    }
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && !DECL_EXTERNAL (sym)
	  && targetm.binds_local_p (sym)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
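
/* For example (illustrative only): given 'static const int n = 42;', a
   GIMPLE use of 'n' folds to 42 above, and a locally-binding 'const'
   static of integral type without an initializer folds to zero.  */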
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */
static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	val.lattice_val = UNDEFINED;
      else
	val.lattice_val = VARYING;
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

   x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */
static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
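
/* For example (illustrative): for 'x_2 = y_3 + 1' this returns CONSTANT
   when y_3 is known CONSTANT (so evaluate_stmt will try to fold),
   UNDEFINED when y_3 is UNDEFINED, and VARYING when y_3 is VARYING.  */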
static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}
/* Debug count support.  Reset the values of ssa names
   VARYING when the total number of ssa names analyzed is
   beyond the debug count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].value = NULL_TREE;
	}
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;

  do_dbg_cnt ();
  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;

  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */
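
/* Worked examples (illustrative): CONSTANT 3 M UNDEFINED = CONSTANT 3,
   CONSTANT 3 M CONSTANT 3 = CONSTANT 3, and CONSTANT 3 M CONSTANT 4
   falls to VARYING.  */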
static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      val1->value = val1->value;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */
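
/* For example (illustrative): with 'p_1 = &x; ... y_2 = *p_1;' the
   dereference of the propagated address folds to a direct use of 'x',
   provided x's type is complete and converts uselessly to *p_1's.  */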
bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */
static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return get_value (rhs)->value;
		}
	      /* Handle propagating invariant addresses into address operations.
		 The folding we do here matches that in tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && may_propagate_address_into_dereference
			       (val->value, *base))
			{
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  tree ret, save = *base;
			  *base = TREE_OPERAND (val->value, 0);
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, list;

		  list = NULL_TREE;
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      if (TREE_CODE (val) == SSA_NAME
			  && get_value (val)->lattice_val == CONSTANT)
			val = get_value (val)->value;
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			list = tree_cons (NULL_TREE, val, list);
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), nreverse (list));
		}
	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT)
			return fold_unary (TREE_CODE (rhs),
					   TREE_TYPE (rhs), val->value);
		    }
		  else if (TREE_CODE (rhs) == INDIRECT_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && useless_type_conversion_p (TREE_TYPE (rhs),
							TREE_TYPE (TREE_TYPE (val->value))))
			rhs = TREE_OPERAND (val->value, 0);
		    }
		  return fold_const_aggregate_ref (rhs);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }
	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = gimple_assign_rhs1 (stmt);

	      /* Simplify the operand down to a constant.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = get_value (op0)->value;
		}

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  /* Do not allow differences in volatile qualification
		     as this might get us confused as to whether a
		     propagation destination statement is volatile
		     or not.  See PR36988.  */
		  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
		      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
		{
		  tree tem;
		  /* Still try to generate a constant of correct type.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
				   (op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      return fold_unary_ignore_overflow (subcode,
						 gimple_expr_type (stmt), op0);
	    }
	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = gimple_assign_rhs1 (stmt);
	      tree op1 = gimple_assign_rhs2 (stmt);

	      /* Simplify the operands down to constants when appropriate.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      if (TREE_CODE (op1) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op1);
		  if (val->lattice_val == CONSTANT)
		    op1 = val->value;
		}

	      /* Fold &foo + CST into an invariant reference if possible.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree tem = maybe_fold_offset_to_address (op0, op1,
							   TREE_TYPE (lhs));
		  if (tem != NULL_TREE)
		    return tem;
		}

	      return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;
    case GIMPLE_CALL:
      {
	tree fn = gimple_call_fn (stmt);
	prop_value_t *val;

	if (TREE_CODE (fn) == SSA_NAME)
	  {
	    val = get_value (fn);
	    if (val->lattice_val == CONSTANT)
	      fn = val->value;
	  }
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;

	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      {
		args[i] = gimple_call_arg (stmt, i);
		if (TREE_CODE (args[i]) == SSA_NAME)
		  {
		    val = get_value (args[i]);
		    if (val->lattice_val == CONSTANT)
		      args[i] = val->value;
		  }
	      }
	    call = build_call_array (gimple_call_return_type (stmt),
				     fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }
    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = gimple_cond_lhs (stmt);
	tree op1 = gimple_cond_rhs (stmt);
	enum tree_code code = gimple_cond_code (stmt);

	/* Simplify the operands down to constants when appropriate.  */
	if (TREE_CODE (op0) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op0);
	    if (val->lattice_val == CONSTANT)
	      op0 = val->value;
	  }

	if (TREE_CODE (op1) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op1);
	    if (val->lattice_val == CONSTANT)
	      op1 = val->value;
	  }

	return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	tree rhs = gimple_switch_index (stmt);

	if (TREE_CODE (rhs) == SSA_NAME)
	  {
	    /* If the RHS is an SSA_NAME, return its known constant value,
	       if any.  */
	    return get_value (rhs)->value;
	  }

	return rhs;
      }

    default:
      gcc_unreachable ();
    }
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */
static tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;
    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case INDIRECT_REF:
      {
	tree base = TREE_OPERAND (t, 0);
	if (TREE_CODE (base) == SSA_NAME
	    && (value = get_value (base))
	    && value->lattice_val == CONSTANT
	    && TREE_CODE (value->value) == ADDR_EXPR
	    && useless_type_conversion_p (TREE_TYPE (t),
					  TREE_TYPE (TREE_TYPE (value->value))))
	  return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:
	  ;
	}
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  /* For a simple copy operation, we copy the lattice values.  */
	  prop_value_t *nval = get_value (rhs);
	  val = *nval;
	}
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
      case GIMPLE_ASSIGN:
	/* If the statement is an assignment that produces a single
	   output value, evaluate its RHS to see if the lattice value of
	   its output has changed.  */
	return visit_assignment (stmt, output_p);

      case GIMPLE_CALL:
	/* A value-returning call also performs an assignment.  */
	if (gimple_call_lhs (stmt) != NULL_TREE)
	  return visit_assignment (stmt, output_p);
	break;

      case GIMPLE_COND:
      case GIMPLE_SWITCH:
	/* If STMT is a conditional branch, see if we can determine
	   which branch will be taken.  */
	/* FIXME.  It appears that we should be able to optimize
	   computed GOTOs here as well.  */
	return visit_cond_stmt (stmt, taken_edge_p);

      default:
	break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}
struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
    | TODO_verify_stmts | TODO_ggc_collect	/* todo_flags_finish */
 }
};
/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
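
/* For example (illustrative, assuming 4-byte int): for 'int a[8];' the
   reference '*(int *)((char *)&a + 8)' can be rewritten as 'a[2]', since
   the element size exactly divides the byte offset.  */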
static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
				bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);

  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds. For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should be not folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
	  /* Accesses after the end of arrays of size 0 (gcc
	     extension) and 1 are likely intentional ("struct
	     hack").  */
	  && compare_tree_int (up_bound, 1) > 0)
	return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
	  && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
    }
  else if (!allow_negative_idx
	   && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}
/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
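
/* For example (illustrative, assuming 4-byte int): for
   'struct S { int x; int y; } s;' the reference
   '*(int *)((char *)&s + 4)' can be rewritten as 's.y'.  */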
static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret;
  tree new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      if (!DECL_FIELD_OFFSET (f))
	continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && useless_type_conversion_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used as for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      if (base_is_ptr)
	new_base = build1 (INDIRECT_REF, record_type, base);
      else
	new_base = base;
      new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
					    f == TYPE_FIELDS (record_type));
      if (ret)
	return ret;
      ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
						orig_type, false);
      if (ret)
	return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
				      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */
static tree
maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
{
  tree ret;
  tree type;
  bool base_is_ptr = true;

  STRIP_NOPS (base);
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base_is_ptr = false;

      base = TREE_OPERAND (base, 0);

      /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
	 so it needs to be removed and new COMPONENT_REF constructed.
	 The wrong COMPONENT_REF are often constructed by folding the
	 (type *)&object within the expression (type *)&object+offset  */
      if (handled_component_p (base))
	{
	  HOST_WIDE_INT sub_offset, size, maxsize;
	  tree newbase;
	  newbase = get_ref_base_and_extent (base, &sub_offset,
					     &size, &maxsize);
	  gcc_assert (newbase);
	  if (size == maxsize
	      && size != -1
	      && !(sub_offset & (BITS_PER_UNIT - 1)))
	    {
	      base = newbase;
	      if (sub_offset)
		offset = int_const_binop (PLUS_EXPR, offset,
					  build_int_cst (TREE_TYPE (offset),
							 sub_offset / BITS_PER_UNIT), 1);
	    }
	}
      if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
	  && integer_zerop (offset))
	return base;
      type = TREE_TYPE (base);
    }
  else
    {
      base_is_ptr = true;

      if (!POINTER_TYPE_P (TREE_TYPE (base)))
	return NULL_TREE;
      type = TREE_TYPE (TREE_TYPE (base));
    }
  ret = maybe_fold_offset_to_component_ref (type, base, offset,
					    orig_type, base_is_ptr);
  if (!ret)
    {
      if (base_is_ptr)
	base = build1 (INDIRECT_REF, type, base);
      ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
    }
  return ret;
}
/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by combination of those.

   Before attempting the conversion strip off existing component refs.  */
tree
maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
{
  tree t;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
	      && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
  if (t != NULL_TREE)
    {
      tree orig = addr;
      tree ptr_type;

      /* For __builtin_object_size to function correctly we need to
	 make sure not to fold address arithmetic so that we change
	 reference from one array to another.  This would happen for
	 example for

	   struct X { char s1[10]; char s2[10] } s;
	   char *foo (void) { return &s.s2[-4]; }

	 where we need to avoid generating &s.s1[6].  As the C and
	 C++ frontends create different initial trees
	 (char *) &s.s1 + -4  vs.  &s.s1[-4]  we have to do some
	 sophisticated comparisons here.  Note that checking for the
	 condition after the fact is easier than trying to avoid doing
	 the folding.  */
      STRIP_NOPS (orig);
      if (TREE_CODE (orig) == ADDR_EXPR)
	orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
	   || (TREE_CODE (orig) == COMPONENT_REF
	       && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
	  && (TREE_CODE (t) == ARRAY_REF
	      || TREE_CODE (t) == COMPONENT_REF)
	  && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
			       ? TREE_OPERAND (orig, 0) : orig,
			       TREE_CODE (t) == ARRAY_REF
			       ? TREE_OPERAND (t, 0) : t, 0))
	return NULL_TREE;

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
	return NULL_TREE;
      return build_fold_addr_expr_with_type (t, ptr_type);
    }

  return NULL_TREE;
}
/* A subroutine of fold_stmt.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */
static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
			     int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
	  && is_gimple_min_invariant (DECL_INITIAL (base)))
	return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (base_addr, offset,
					  TREE_TYPE (expr));
      if (t)
	{
	  /* Preserve volatileness of the original expression.
	     We can end up with a plain decl here which is shared
	     and we shouldn't mess with its flags.  */
	  if (!SSA_VAR_P (t))
	    TREE_THIS_VOLATILE (t) = volatile_p;
	  return t;
	}
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we die in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_reference (base, offset,
					      TREE_TYPE (expr));
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */
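
/* For example (illustrative, assuming 4-byte int): for 'int a[10];' the
   expression '(int *)(&a p+ 4)' can be turned into '&a[1]', since the
   4-byte offset is exactly one element.  */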
2142 maybe_fold_stmt_addition (tree res_type
, tree op0
, tree op1
)
2147 /* The first operand should be an ADDR_EXPR. */
2148 if (TREE_CODE (op0
) != ADDR_EXPR
)
2150 op0
= TREE_OPERAND (op0
, 0);
2152 /* It had better be a constant. */
2153 if (TREE_CODE (op1
) != INTEGER_CST
)
2155 /* Or op0 should now be A[0] and the non-constant offset defined
2156 via a multiplication by the array element size. */
2157 if (TREE_CODE (op0
) == ARRAY_REF
2158 && integer_zerop (TREE_OPERAND (op0
, 1))
2159 && TREE_CODE (op1
) == SSA_NAME
2160 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0
)), 1))
2162 gimple offset_def
= SSA_NAME_DEF_STMT (op1
);
2163 if (!is_gimple_assign (offset_def
))
2166 if (gimple_assign_rhs_code (offset_def
) == MULT_EXPR
2167 && TREE_CODE (gimple_assign_rhs2 (offset_def
)) == INTEGER_CST
2168 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def
),
2169 TYPE_SIZE_UNIT (TREE_TYPE (op0
))))
2170 return build1 (ADDR_EXPR
, res_type
,
2171 build4 (ARRAY_REF
, TREE_TYPE (op0
),
2172 TREE_OPERAND (op0
, 0),
2173 gimple_assign_rhs1 (offset_def
),
2174 TREE_OPERAND (op0
, 2),
2175 TREE_OPERAND (op0
, 3)));
2176 else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0
)))
2177 && gimple_assign_rhs_code (offset_def
) != MULT_EXPR
)
2178 return build1 (ADDR_EXPR
, res_type
,
2179 build4 (ARRAY_REF
, TREE_TYPE (op0
),
2180 TREE_OPERAND (op0
, 0),
2182 TREE_OPERAND (op0
, 2),
2183 TREE_OPERAND (op0
, 3)));
  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR, array_idx, op1, 0);
      op0 = array_obj;
    }
  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
					    ptd_type);
  if (t)
    t = build1 (ADDR_EXPR, res_type, t);

  return t;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;

  if (TREE_CODE (expr) == ARRAY_REF
      && !is_lhs)
    {
      tree tem = fold_read_from_constant_string (expr);
      if (tem)
	return tem;
    }

  /* ???  We might want to open-code the relevant remaining cases
     to avoid using the generic fold.  */
  if (handled_component_p (*t)
      && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
    {
      tree tem = fold (*t);
      if (tem != *t)
	return tem;
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  if (TREE_CODE (*t) == INDIRECT_REF)
    {
      tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
					   integer_zero_node);
      /* Avoid folding *"abc" = 5 into 'a' = 5.  */
      if (is_lhs && tem && CONSTANT_CLASS_P (tem))
	tem = NULL_TREE;
      if (!tem
	  && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
	/* If we had a good reason for propagating the address here,
	   make sure we end up with valid gimple.  See PR34989.  */
	tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
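/* Illustrative example (not from the original sources): for

	s_1 = PHI <"ab", "wxyz">
	x_2 = strlen (s_1)

   a query with TYPE == 0 fails because the lengths 2 and 4 differ,
   while TYPE == 1 succeeds and returns 4, the maximum string length
   over the PHI arguments.  */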
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }
  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
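/* For instance (a sketch of the strcpy case): when the length of SRC
   is known to be 3,

	strcpy (dst, src);

   may be rewritten as  memcpy (dst, src, 4);  which also copies the
   terminating NUL; the replacement is another call, not a constant.  */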
static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;
  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;
  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);
  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we can not use the result.  */
	  if (is_gimple_val (new_val)
	      || (is_gimple_cast (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */
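/* Illustrative examples of rhs folds handled here (sketches only):

	x_1 = 2 + 3;			becomes  x_1 = 5;
	x_2 = 1 < 2 ? a_3 : b_4;	becomes  x_2 = a_3;

   The simplified rhs is returned; the caller rewrites the statement.  */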
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  tree result = NULL_TREE;
  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try to fold a conditional expression.  */
	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    tree op0 = COND_EXPR_COND (rhs);
	    tree tem;
	    bool set = false;

	    if (COMPARISON_CLASS_P (op0))
	      {
		fold_defer_overflow_warnings ();
		tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   TREE_OPERAND (op0, 1));
		/* This is actually a conditional expression, not a GIMPLE
		   conditional statement, however, the valid_gimple_rhs_p
		   test still applies.  */
		set = (tem && is_gimple_condexpr (tem)
		       && valid_gimple_rhs_p (tem));
		fold_undefer_overflow_warnings (set, stmt, 0);
	      }
	    else if (is_gimple_min_invariant (op0))
	      {
		tem = op0;
		set = true;
	      }
	    else
	      return NULL_TREE;

	    if (set)
	      result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), tem,
				    COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
	  }

	else if (TREE_CODE (rhs) == TARGET_MEM_REF)
	  return maybe_fold_tmr (rhs);

	else if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
	    if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr (tem));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;
    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
	else if (CONVERT_EXPR_CODE_P (subcode)
		 && POINTER_TYPE_P (gimple_expr_type (stmt))
		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	  {
	    tree type = gimple_expr_type (stmt);
	    tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
						   integer_zero_node, type);
	    if (t)
	      return t;
	  }
      }
      break;
    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
	    {
	      type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
	      if (!useless_type_conversion_p
		    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
		type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	    }
	  result = maybe_fold_stmt_addition (type,
					     gimple_assign_rhs1 (stmt),
					     gimple_assign_rhs2 (stmt));
	}

      if (!result)
	result = fold_binary (subcode,
			      TREE_TYPE (gimple_assign_lhs (stmt)),
			      gimple_assign_rhs1 (stmt),
			      gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt));
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */
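/* For example (a sketch): after propagating i_1 = 3, the condition of

	if (i_1 > 2) ...

   folds to a constant, and the GIMPLE_COND is rewritten as an
   always-true condition; removing the dead arm is left to the cfg
   cleanup that runs afterwards.  */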
static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary (gimple_cond_code (stmt),
			     boolean_type_node,
			     gimple_cond_lhs (stmt),
			     gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */
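/* As an illustration (not from the original sources): a call

	x_1 = strlen ("abc");

   simplifies to the assignment  x_1 = 3;  and the whole call statement
   is replaced via update_call_from_tree.  */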
static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
	return update_call_from_tree (gsi, result);
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
	 here are when we've propagated the address of a decl into the
	 object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
	 there requires that we create a new CALL_EXPR, and that requires
	 copying EH region info to the new node.  Easier to just do it
	 here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
	  && lang_hooks.fold_obj_type_ref
	  && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND
		     (OBJ_TYPE_REF_OBJECT (callee), 0)))
	{
	  tree t;

	  /* ??? Caution: Broken ADDR_EXPR semantics means that
	     looking at the type of the operand of the addr_expr
	     can yield an array type.  See silly exception in
	     check_pointer_types_r.  */
	  t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
	  t = lang_hooks.fold_obj_type_ref (callee, t);
	  if (t)
	    {
	      gimple_call_set_fn (stmt, t);
	      return true;
	    }
	}
    }

  return false;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs != NULL_TREE
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
	  {
	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	    if (tmp)
	      {
		gimple_call_set_arg (stmt, i, tmp);
		changed = true;
	      }
	  }
      /* The entire statement may be replaced in this case.  */
      if (!inplace)
	changed |= fold_gimple_call (gsi);
      break;
    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	{
	  tree link = gimple_asm_output_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	{
	  tree link = gimple_asm_input_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */
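/* A typical (hypothetical) use is folding every statement of a basic
   block after substituting constants:

	gimple_stmt_iterator gsi;
	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	  fold_stmt (&gsi);

   Statement replacement is allowed here, unlike fold_stmt_inplace.  */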
bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */
bool
fold_stmt_inplace (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  gcc_assert (gsi_stmt (gsi) == stmt);
  return changed;
}
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */
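/* Sketch of the redundant pattern (illustrative only):

	p_1 = __builtin_stack_save ();
	...				<-- no calls or asm statements
	__builtin_stack_restore (p_1);

   When nothing in between can have grown the stack, the restore has no
   effect; it is replaced by a no-effect value and later deleted.  */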
static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee, rhs;
  gimple stmt, stack_save;
  gimple_stmt_iterator stack_save_gsi;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	break;
    }

  if (gsi_end_p (i)
      && (! single_succ_p (bb)
	  || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;

  stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
  if (gimple_code (stack_save) != GIMPLE_CALL
      || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
      || stmt_could_throw_p (stack_save)
      || !has_single_use (gimple_call_arg (call, 0)))
    return NULL_TREE;

  callee = gimple_call_fndecl (stack_save);
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || gimple_call_num_args (stack_save) != 0)
    return NULL_TREE;

  stack_save_gsi = gsi_for_stmt (stack_save);
  rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
  if (!update_call_from_tree (&stack_save_gsi, rhs))
    return NULL_TREE;

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */
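/* Illustrative effect when va_list is a plain pointer (a sketch):

	__builtin_va_start (&ap, 0)	becomes  ap = __builtin_next_arg (0)
	__builtin_va_copy (&d, s)	becomes  d = s
	__builtin_va_end (&ap)		becomes  a deleted no-op.  */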
static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
			     1, integer_zero_node);
      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */
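/* Hypothetical usage sketch: if a builtin fold yields an expression too
   complex for update_call_from_tree, say  x_1 = strcpy (d_2, s_3)
   folded to the non-gimple value  d_2 + len_4,  this routine gimplifies
   the expression into a temporary, inserts the new statements before
   the call, and finally replaces the call with  x_1 = tmp.  */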
static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }

  if (lhs == NULL_TREE)
    {
      new_stmt = gimple_build_nop ();
      unlink_stmt_vdef (stmt);
      release_defs (stmt);
    }
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */
static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = ccp_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_update_address_taken;
	    }

	  stmt = gsi_stmt (i);
	  update_stmt (stmt);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};