/* SSA operands management for trees.
   Copyright (C) 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "coretypes.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual
   tree.  Virtual operands are represented solely in the cache, although
   the base variable for the SSA_NAME may or may not occur in the stmt's
   tree.  Manipulation of the virtual operands will not be reflected in the
   stmt tree.
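
   For illustration (an added example, not part of the original text): in
   a statement 'a = b + c' where 'a', 'b' and 'c' are GIMPLE registers,
   the DEF of 'a' and the USEs of 'b' and 'c' are real operands, pointers
   into the statement tree.  In '*p = x', where 'p' may point to 'a', the
   V_MAY_DEF of 'a' lives only in the cache.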
   The routines in this file are concerned with creating this operand cache
   from a stmt tree.  get_stmt_operands() is the primary entry point.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a
   perfect match, the old vector is simply reused.  If it isn't a perfect
   match, then a new vector is created and the new operands are placed
   there.  For virtual operands, if the previous cache had an SSA_NAME
   version of a variable, and that same variable occurs in the same
   operands cache, then the new cache vector will also get the same
   SSA_NAME.

   I.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
/* Flags to describe operand properties in get_stmt_operands and helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def	(1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 2)
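
/* Added example (not in the original sources): for 'a = b' the LHS is
   scanned with (opf_is_def | opf_kill_def) because the whole variable is
   overwritten, while a partial store such as 'a.f = b' passes only
   opf_is_def, yielding a V_MAY_DEF instead of a killing definition.  */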
/* Array for building all the def operands.  */
static GTY (()) varray_type build_defs;

/* Array for building all the use operands.  */
static GTY (()) varray_type build_uses;

/* Array for building all the v_may_def operands.  */
static GTY (()) varray_type build_v_may_defs;

/* Array for building all the vuse operands.  */
static GTY (()) varray_type build_vuses;

/* Array for building all the v_must_def operands.  */
static GTY (()) varray_type build_v_must_defs;
#ifdef ENABLE_CHECKING
/* Used to make sure operand construction is working on the proper stmt.  */
tree check_build_stmt;
#endif

def_operand_p NULL_DEF_OPERAND_P = { NULL };
use_operand_p NULL_USE_OPERAND_P = { NULL };
static void note_addressable (tree, stmt_ann_t);
static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
static void append_v_may_def (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree);
static void add_call_read_ops (tree);
static void add_stmt_operand (tree *, tree, int);
/* Return a vector of contiguous memory for NUM def operands.  */

static inline def_optype
allocate_def_optype (unsigned num)
{
  def_optype def_ops;
  unsigned size;

  size = sizeof (struct def_optype_d) + sizeof (tree *) * (num - 1);
  def_ops = ggc_alloc (size);
  def_ops->num_defs = num;
  return def_ops;
}
/* Return a vector of contiguous memory for NUM use operands.  */

static inline use_optype
allocate_use_optype (unsigned num)
{
  use_optype use_ops;
  unsigned size;

  size = sizeof (struct use_optype_d) + sizeof (tree *) * (num - 1);
  use_ops = ggc_alloc (size);
  use_ops->num_uses = num;
  return use_ops;
}
/* Return a vector of contiguous memory for NUM v_may_def operands.  */

static inline v_may_def_optype
allocate_v_may_def_optype (unsigned num)
{
  v_may_def_optype v_may_def_ops;
  unsigned size;

  size = sizeof (struct v_may_def_optype_d)
	 + sizeof (v_may_def_operand_type_t) * (num - 1);
  v_may_def_ops = ggc_alloc (size);
  v_may_def_ops->num_v_may_defs = num;
  return v_may_def_ops;
}
/* Return a vector of contiguous memory for NUM vuse operands.  */

static inline vuse_optype
allocate_vuse_optype (unsigned num)
{
  vuse_optype vuse_ops;
  unsigned size;

  size = sizeof (struct vuse_optype_d) + sizeof (tree) * (num - 1);
  vuse_ops = ggc_alloc (size);
  vuse_ops->num_vuses = num;
  return vuse_ops;
}
/* Return a vector of contiguous memory for NUM v_must_def operands.  */

static inline v_must_def_optype
allocate_v_must_def_optype (unsigned num)
{
  v_must_def_optype v_must_def_ops;
  unsigned size;

  size = sizeof (struct v_must_def_optype_d) + sizeof (tree) * (num - 1);
  v_must_def_ops = ggc_alloc (size);
  v_must_def_ops->num_v_must_defs = num;
  return v_must_def_ops;
}
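
/* Added note: the allocate_* routines above rely on the classic C89
   "struct hack": each *_optype_d structure ends in a one-element array,
   so allocating sizeof (struct) plus space for (num - 1) extra elements
   produces one contiguous block holding the header and all NUM operand
   slots.  */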
/* Free memory for USES.  */

void
free_uses (use_optype *uses)
{
  if (*uses)
    {
      ggc_free (*uses);
      *uses = NULL;
    }
}


/* Free memory for DEFS.  */

void
free_defs (def_optype *defs)
{
  if (*defs)
    {
      ggc_free (*defs);
      *defs = NULL;
    }
}


/* Free memory for VUSES.  */

void
free_vuses (vuse_optype *vuses)
{
  if (*vuses)
    {
      ggc_free (*vuses);
      *vuses = NULL;
    }
}


/* Free memory for V_MAY_DEFS.  */

void
free_v_may_defs (v_may_def_optype *v_may_defs)
{
  if (*v_may_defs)
    {
      ggc_free (*v_may_defs);
      *v_may_defs = NULL;
    }
}


/* Free memory for V_MUST_DEFS.  */

void
free_v_must_defs (v_must_def_optype *v_must_defs)
{
  if (*v_must_defs)
    {
      ggc_free (*v_must_defs);
      *v_must_defs = NULL;
    }
}
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  VARRAY_TREE_PTR_INIT (build_defs, 5, "build defs");
  VARRAY_TREE_PTR_INIT (build_uses, 10, "build uses");
  VARRAY_TREE_INIT (build_v_may_defs, 10, "build v_may_defs");
  VARRAY_TREE_INIT (build_vuses, 10, "build vuses");
  VARRAY_TREE_INIT (build_v_must_defs, 10, "build v_must_defs");
}
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  ggc_free (build_defs);
  ggc_free (build_uses);
  ggc_free (build_v_may_defs);
  ggc_free (build_vuses);
  ggc_free (build_v_must_defs);
  build_defs = NULL;
  build_uses = NULL;
  build_v_may_defs = NULL;
  build_vuses = NULL;
  build_v_must_defs = NULL;
}
/* All the finalize_ssa_* routines do the work required to turn the build_
   VARRAY into an operand_vector of the appropriate type.  The original
   vector, if any, is passed in for comparison and virtual SSA_NAME reuse.
   If the old vector is reused, the pointer passed in is set to NULL so that
   the memory is not freed when the old operands are freed.  */
/* Return a new def operand vector for STMT, comparing to OLD_OPS_P.  */

static def_optype
finalize_ssa_defs (def_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x;
  def_optype def_ops, old_ops;

  num = VARRAY_ACTIVE_SIZE (build_defs);
  if (num == 0)
    return NULL;

  /* There should only be a single real definition per assignment.  */
  gcc_assert (TREE_CODE (stmt) != MODIFY_EXPR || num <= 1);

  old_ops = *old_ops_p;

  /* Compare old vector and new array.  */
  if (old_ops && old_ops->num_defs == num)
    {
      for (x = 0; x < num; x++)
	if (old_ops->defs[x].def != VARRAY_TREE_PTR (build_defs, x))
	  break;

      if (x == num)
	{
	  /* The old vector matches the new one; reuse it and set the
	     incoming pointer to NULL so the memory is not freed when
	     the old operands are freed.  */
	  *old_ops_p = NULL;
	  VARRAY_POP_ALL (build_defs);
	  return old_ops;
	}
    }

  def_ops = allocate_def_optype (num);
  for (x = 0; x < num; x++)
    def_ops->defs[x].def = VARRAY_TREE_PTR (build_defs, x);

  VARRAY_POP_ALL (build_defs);

  return def_ops;
}
/* Return a new use operand vector for STMT, comparing to OLD_OPS_P.  */

static use_optype
finalize_ssa_uses (use_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x;
  use_optype use_ops, old_ops;

  num = VARRAY_ACTIVE_SIZE (build_uses);
  if (num == 0)
    return NULL;

#ifdef ENABLE_CHECKING
  /* If the pointer to the operand is the statement itself, something is
     wrong.  It means that we are pointing to a local variable (the
     initial call to get_stmt_operands does not pass a pointer to a
     local variable).  */
  for (x = 0; x < num; x++)
    gcc_assert (*(VARRAY_TREE_PTR (build_uses, x)) != stmt);
#endif

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  if (old_ops && old_ops->num_uses == num)
    {
      for (x = 0; x < num; x++)
	if (old_ops->uses[x].use != VARRAY_TREE_PTR (build_uses, x))
	  break;

      if (x == num)
	{
	  /* The old vector matches the new one; reuse it and set the
	     incoming pointer to NULL so the memory is not freed when
	     the old operands are freed.  */
	  *old_ops_p = NULL;
	  VARRAY_POP_ALL (build_uses);
	  return old_ops;
	}
    }

  use_ops = allocate_use_optype (num);
  for (x = 0; x < num; x++)
    use_ops->uses[x].use = VARRAY_TREE_PTR (build_uses, x);

  VARRAY_POP_ALL (build_uses);

  return use_ops;
}
/* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P.  */

static v_may_def_optype
finalize_ssa_v_may_defs (v_may_def_optype *old_ops_p)
{
  unsigned num, x, i, old_num;
  v_may_def_optype v_may_def_ops, old_ops;
  tree result, var;

  num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
  if (num == 0)
    return NULL;

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  x = 0;
  if (old_ops && old_ops->num_v_may_defs == num)
    for (x = 0; x < num; x++)
      {
	var = old_ops->v_may_defs[x].def;
	if (TREE_CODE (var) == SSA_NAME)
	  var = SSA_NAME_VAR (var);
	if (var != VARRAY_TREE (build_v_may_defs, x))
	  break;
      }

  old_num = (old_ops ? old_ops->num_v_may_defs : 0);

  if (old_ops && x == num)
    {
      /* The old vector matches the new one; reuse it and set the
	 incoming pointer to NULL so the memory is not freed later.  */
      *old_ops_p = NULL;
      v_may_def_ops = old_ops;
    }
  else
    {
      v_may_def_ops = allocate_v_may_def_optype (num);
      for (x = 0; x < num; x++)
	{
	  var = VARRAY_TREE (build_v_may_defs, x);
	  /* Look for VAR in the old operands vector.  */
	  for (i = 0; i < old_num; i++)
	    {
	      result = old_ops->v_may_defs[i].def;
	      if (TREE_CODE (result) == SSA_NAME)
		result = SSA_NAME_VAR (result);
	      if (result == var)
		break;
	    }

	  if (i < old_num)
	    /* Reuse the old operand, keeping its SSA_NAME versions.  */
	    v_may_def_ops->v_may_defs[x] = old_ops->v_may_defs[i];
	  else
	    {
	      v_may_def_ops->v_may_defs[x].def = var;
	      v_may_def_ops->v_may_defs[x].use = var;
	    }
	}
    }

  /* Empty the V_MAY_DEF build vector after VUSES have been processed.  */

  return v_may_def_ops;
}
/* Return a new vuse operand vector, comparing to OLD_OPS_P.  */

static vuse_optype
finalize_ssa_vuses (vuse_optype *old_ops_p)
{
  unsigned num, x, i, j, num_v_may_defs, old_num;
  vuse_optype vuse_ops, old_ops;
  tree vuse, v;

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  if (num == 0)
    {
      VARRAY_POP_ALL (build_v_may_defs);
      return NULL;
    }

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  num_v_may_defs = VARRAY_ACTIVE_SIZE (build_v_may_defs);

  if (num_v_may_defs > 0)
    {
      for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
	{
	  vuse = VARRAY_TREE (build_vuses, i);
	  for (j = 0; j < num_v_may_defs; j++)
	    if (vuse == VARRAY_TREE (build_v_may_defs, j))
	      break;

	  /* If we found a useless VUSE operand, remove it from the
	     operand array by replacing it with the last active element
	     in the operand array (unless the useless VUSE was the
	     last operand, in which case we simply remove it).  */
	  if (j != num_v_may_defs)
	    {
	      if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
		VARRAY_TREE (build_vuses, i)
		  = VARRAY_TREE (build_vuses,
				 VARRAY_ACTIVE_SIZE (build_vuses) - 1);
	      VARRAY_POP (build_vuses);

	      /* We want to rescan the element at this index, unless
		 this was the last element, in which case the loop
		 terminates.  */
	      i--;
	    }
	}

      num = VARRAY_ACTIVE_SIZE (build_vuses);
      /* We could have reduced the size to zero now, however.  */
      if (num == 0)
	{
	  VARRAY_POP_ALL (build_v_may_defs);
	  return NULL;
	}
    }

  old_ops = *old_ops_p;

  /* Determine whether vuses is the same as the old vector.  */
  x = 0;
  if (old_ops && old_ops->num_vuses == num)
    for (x = 0; x < num; x++)
      {
	v = old_ops->vuses[x];
	if (TREE_CODE (v) == SSA_NAME)
	  v = SSA_NAME_VAR (v);
	if (v != VARRAY_TREE (build_vuses, x))
	  break;
      }

  old_num = (old_ops ? old_ops->num_vuses : 0);

  if (old_ops && x == num)
    {
      /* Reuse the old vector; clear the incoming pointer so the memory
	 is not freed when the old operands are freed.  */
      *old_ops_p = NULL;
      vuse_ops = old_ops;
    }
  else
    {
      vuse_ops = allocate_vuse_optype (num);
      for (x = 0; x < num; x++)
	{
	  tree result, var = VARRAY_TREE (build_vuses, x);
	  /* Look for VAR in the old vector, and use that SSA_NAME.  */
	  for (i = 0; i < old_num; i++)
	    {
	      result = old_ops->vuses[i];
	      if (TREE_CODE (result) == SSA_NAME)
		result = SSA_NAME_VAR (result);
	      if (result == var)
		break;
	    }

	  if (i < old_num)
	    vuse_ops->vuses[x] = old_ops->vuses[i];
	  else
	    vuse_ops->vuses[x] = var;
	}
    }

  /* The v_may_def build vector wasn't freed because we needed it here.
     Free it now with the vuses build vector.  */
  VARRAY_POP_ALL (build_vuses);
  VARRAY_POP_ALL (build_v_may_defs);

  return vuse_ops;
}
/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P.  */

static v_must_def_optype
finalize_ssa_v_must_defs (v_must_def_optype *old_ops_p,
			  tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x, i, old_num = 0;
  v_must_def_optype v_must_def_ops, old_ops;

  num = VARRAY_ACTIVE_SIZE (build_v_must_defs);
  if (num == 0)
    return NULL;

  /* There should only be a single V_MUST_DEF per assignment.  */
  gcc_assert (TREE_CODE (stmt) != MODIFY_EXPR || num <= 1);

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  x = 0;
  if (old_ops && old_ops->num_v_must_defs == num)
    for (x = 0; x < num; x++)
      {
	tree var = old_ops->v_must_defs[x];
	if (TREE_CODE (var) == SSA_NAME)
	  var = SSA_NAME_VAR (var);
	if (var != VARRAY_TREE (build_v_must_defs, x))
	  break;
      }

  old_num = (old_ops ? old_ops->num_v_must_defs : 0);

  if (old_ops && x == num)
    {
      /* Reuse the old vector; clear the incoming pointer so the memory
	 is not freed when the old operands are freed.  */
      *old_ops_p = NULL;
      v_must_def_ops = old_ops;
    }
  else
    {
      v_must_def_ops = allocate_v_must_def_optype (num);
      for (x = 0; x < num; x++)
	{
	  tree result, var = VARRAY_TREE (build_v_must_defs, x);
	  /* Look for VAR in the original vector.  */
	  for (i = 0; i < old_num; i++)
	    {
	      result = old_ops->v_must_defs[i];
	      if (TREE_CODE (result) == SSA_NAME)
		result = SSA_NAME_VAR (result);
	      if (result == var)
		break;
	    }

	  if (i < old_num)
	    v_must_def_ops->v_must_defs[x] = old_ops->v_must_defs[i];
	  else
	    v_must_def_ops->v_must_defs[x] = var;
	}
    }

  VARRAY_POP_ALL (build_v_must_defs);

  return v_must_def_ops;
}
/* Finalize all the build vectors, fill the new ones into INFO.  */

static void
finalize_ssa_stmt_operands (tree stmt, stmt_operands_p old_ops,
			    stmt_operands_p new_ops)
{
  new_ops->def_ops = finalize_ssa_defs (&(old_ops->def_ops), stmt);
  new_ops->use_ops = finalize_ssa_uses (&(old_ops->use_ops), stmt);
  new_ops->v_must_def_ops
    = finalize_ssa_v_must_defs (&(old_ops->v_must_def_ops), stmt);
  new_ops->v_may_def_ops = finalize_ssa_v_may_defs (&(old_ops->v_may_def_ops));
  new_ops->vuse_ops = finalize_ssa_vuses (&(old_ops->vuse_ops));
}
/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VARRAY_ACTIVE_SIZE (build_defs) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_uses) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_vuses) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_v_may_defs) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_v_must_defs) == 0);
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VARRAY_PUSH_TREE_PTR (build_defs, def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VARRAY_PUSH_TREE_PTR (build_uses, use_p);
}
/* Add a new virtual may def for variable VAR to the build array.  */

static void
append_v_may_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
    if (var == VARRAY_TREE (build_v_may_defs, i))
      return;

  VARRAY_PUSH_TREE (build_v_may_defs, var);
}
/* Add VAR to the list of virtual uses.  */

static void
append_vuse (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
    if (var == VARRAY_TREE (build_vuses, i))
      return;

  VARRAY_PUSH_TREE (build_vuses, var);
}
/* Add VAR to the list of virtual must definitions for INFO.  */

static void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_must_defs); i++)
    if (var == VARRAY_TREE (build_v_must_defs, i))
      return;

  VARRAY_PUSH_TREE (build_v_must_defs, var);
}
/* Create an operands cache for STMT, returning it in NEW_OPS.  OLD_OPS are
   the original operands, and if ANN is non-null, appropriate stmt flags are
   set in the stmt's annotation.  Note that some fields in old_ops may
   change to NULL, although none of the memory they originally pointed to
   will be destroyed.  It is appropriate to call free_stmt_operands() on
   the value returned in old_ops.

   The rationale for this: certain optimizations wish to examine the
   difference between new_ops and old_ops after processing.  If a set of
   operands doesn't change, new_ops will simply assume the pointer in
   old_ops, and the old_ops pointer will be set to NULL, indicating no
   memory needs to be cleared.  Usage might appear something like:

       old_ops_copy = old_ops = stmt_ann(stmt)->operands;
       build_ssa_operands (stmt, NULL, &old_ops, &new_ops);
       <* compare old_ops_copy and new_ops *>
       free_ssa_operands (old_ops);				*/
static void
build_ssa_operands (tree stmt, stmt_ann_t ann, stmt_operands_p old_ops,
		    stmt_operands_p new_ops)
{
  enum tree_code code;
  tree_ann_t saved_ann = stmt->common.ann;

  /* Replace stmt's annotation with the one passed in for the duration
     of the operand building process.  This allows "fake" stmts to be built
     and not be included in other data structures which can be built here.  */
  stmt->common.ann = (tree_ann_t) ann;

  /* Initially assume that the statement has no volatile operands, nor
     makes aliased loads or stores.  */
  if (ann)
    {
      ann->has_volatile_ops = false;
      ann->makes_aliased_stores = false;
      ann->makes_aliased_loads = false;
    }

  start_ssa_stmt_operands ();

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_RANGE_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == REALPART_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == IMAGPART_EXPR
	  /* Use a V_MAY_DEF if the RHS might throw, as the LHS won't be
	     modified in that case.  FIXME we should represent somehow
	     that it is killed on the fallthrough path.  */
	  || tree_could_throw_p (TREE_OPERAND (stmt, 1)))
	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_is_def);
      else
	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
			   opf_is_def | opf_kill_def);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

      /* These nodes contain no variable references.  */
    case CASE_LABEL_EXPR:
    case TRY_FINALLY_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
	 pointer (which may only happen for USE operands), we will abort in
	 append_use.  This default will handle statements like empty
	 statements, or CALL_EXPRs that may appear on the RHS of a statement
	 or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }

  finalize_ssa_stmt_operands (stmt, old_ops, new_ops);
  stmt->common.ann = saved_ann;
}
/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  if (ops->def_ops)
    free_defs (&(ops->def_ops));
  if (ops->use_ops)
    free_uses (&(ops->use_ops));
  if (ops->vuse_ops)
    free_vuses (&(ops->vuse_ops));
  if (ops->v_may_def_ops)
    free_v_may_defs (&(ops->v_may_def_ops));
  if (ops->v_must_def_ops)
    free_v_must_defs (&(ops->v_must_def_ops));
}
/* Get the operands of statement STMT.  Note that repeated calls to
   get_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to modify_stmt().  */
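
/* Illustrative usage (added, assuming the usual modify_stmt() protocol):

       modify_stmt (stmt);
       ... rewrite the statement tree ...
       get_stmt_operands (stmt);

   The operand cache is only rebuilt for statements that have been marked
   modified.  */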
void
get_stmt_operands (tree stmt)
{
  stmt_ann_t ann;
  stmt_operands_t old_operands;

  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  /* Ignore error statements.  */
  if (TREE_CODE (stmt) == ERROR_MARK)
    return;

  ann = get_stmt_ann (stmt);

  /* If the statement has not been modified, the operands are still valid.  */
  if (!ann->modified)
    return;

  timevar_push (TV_TREE_OPS);

  old_operands = ann->operands;
  memset (&(ann->operands), 0, sizeof (stmt_operands_t));

  build_ssa_operands (stmt, ann, &old_operands, &(ann->operands));
  free_ssa_operands (&old_operands);

  /* Clear the modified bit for STMT.  Subsequent calls to
     get_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to modify_stmt().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
/* Recursively scan the expression pointed by EXPR_P in statement referred to
   by INFO.  FLAGS is one of the OPF_* constants modifying how to interpret
   the operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  char class;
  tree expr = *expr_p;

  if (expr == NULL || expr == error_mark_node)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* We could have the address of a component, array member,
	 etc which has interesting variable references.  */
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the stmt takes its address
	 will be of interest to some passes (e.g. alias resolution).  */
      add_stmt_operand (expr_p, stmt, 0);

      /* If the address is invariant, there may be no interesting variable
	 references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* There should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find here
	 are ARRAY_REF indices which will always be real operands (GIMPLE
	 does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
      /* If we found a variable, add it to DEFS or USES depending
	 on the operand flags.  */
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
	 representing the array.  The virtual variable for an ARRAY_REF
	 is the VAR_DECL for the array.  */

      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
	 according to the value of IS_DEF.  Recurse if the LHS of the
	 ARRAY_REF node is not a regular variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, stmt, flags);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
      return;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Similarly to arrays, references to compound variables (complex
	 types and structures/unions) are globbed.

	 FIXME: This means that

			a.x = 6;
			a.y = 7;
			foo (a.x, a.y);

	 will not be constant propagated because the two partial
	 definitions to 'a' will kill each other.  Note that SRA may be
	 able to fix this problem if 'a' can be scalarized.  */

      /* If the LHS of the compound reference is not a regular variable,
	 recurse to keep looking for more operands in the subexpression.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, stmt, flags);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      if (code == COMPONENT_REF)
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (expr), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case MODIFY_EXPR:
      {
	int subflags;
	tree op;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

	op = TREE_OPERAND (expr, 0);
	if (TREE_CODE (op) == WITH_SIZE_EXPR)
	  op = TREE_OPERAND (op, 0);
	if (TREE_CODE (op) == ARRAY_REF
	    || TREE_CODE (op) == ARRAY_RANGE_REF
	    || TREE_CODE (op) == COMPONENT_REF
	    || TREE_CODE (op) == REALPART_EXPR
	    || TREE_CODE (op) == IMAGPART_EXPR)
	  subflags = opf_is_def;
	else
	  subflags = opf_is_def | opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
	return;
      }

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	tree t;

	for (t = TREE_OPERAND (expr, 0); t; t = TREE_CHAIN (t))
	  get_expr_operands (stmt, &TREE_VALUE (t), opf_none);
	return;
      }

    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_XOR_EXPR:
    do_binary:
      {
	tree op0 = TREE_OPERAND (expr, 0);
	tree op1 = TREE_OPERAND (expr, 1);

	/* If it would be profitable to swap the operands, then do so to
	   canonicalize the statement, enabling better optimization.

	   By placing canonicalization of such expressions here we
	   transparently keep statements in canonical form, even
	   when the statement is modified.  */
	if (tree_swap_operands_p (op0, op1, false))
	  {
	    /* For relationals we need to swap the operands
	       and change the code.  */
	    if (TREE_CODE_CLASS (code) == '<')
	      {
		TREE_SET_CODE (expr, swap_tree_comparison (code));
		TREE_OPERAND (expr, 0) = op1;
		TREE_OPERAND (expr, 1) = op0;
	      }
	    /* For a commutative operator we can just swap the operands.  */
	    else if (commutative_tree_code (code))
	      {
		TREE_OPERAND (expr, 0) = op1;
		TREE_OPERAND (expr, 1) = op0;
	      }
	  }

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    default:
      /* Expressions that make no memory references.  */
      if (class == '1')
	goto do_unary;
      if (class == '2' || class == '<')
	goto do_binary;
      if (class == 'c' || class == 't')
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  internal_error ("internal error");
#endif
}
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
			       &allows_mem, &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	size_t i;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, stmt, opf_is_def);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, stmt, opf_is_def);
	    });

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i,
	  {
	    tree var = referenced_var (i);
	    add_stmt_operand (&var, stmt, opf_is_def);
	  });

	break;
      }
}
/* A subroutine of get_expr_operands to handle INDIRECT_REF.  */
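
/* Added note: for a dereference such as '*p_1 = x', if p_1 is an SSA_NAME
   carrying flow-sensitive points-to information, the virtual operand
   added below is p_1's name memory tag; otherwise the type memory tag of
   the base pointer is used.  */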
static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (REF_ORIGINAL (expr))
    {
      enum tree_code ocode = TREE_CODE (REF_ORIGINAL (expr));

      /* If we originally accessed part of a structure, we do it still.  */
      if (ocode == ARRAY_REF
	  || ocode == COMPONENT_REF
	  || ocode == REALPART_EXPR
	  || ocode == IMAGPART_EXPR)
	flags &= ~opf_kill_def;
    }

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_stmt_operand (&pi->name_mem_tag, stmt, flags);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its type memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);
	  if (v_ann->type_mem_tag)
	    add_stmt_operand (&v_ann->type_mem_tag, stmt, flags);
	}
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      if (ann)
	ann->has_volatile_ops = true;
      return;
    }

  /* Everything else *should* have been folded elsewhere, but users
     are smarter than we in finding ways to write invalid code.  We
     cannot just abort here.  If we were absolutely certain that we
     do handle all valid cases, then we could just do nothing here.
     That seems optimistic, so attempt to do something logical... */
  else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
	   && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
    {
      /* Make sure we know the object is addressable.  */
      pptr = &TREE_OPERAND (ptr, 0);
      add_stmt_operand (pptr, stmt, 0);

      /* Mark the object itself with a VUSE.  */
      pptr = &TREE_OPERAND (*pptr, 0);
      get_expr_operands (stmt, pptr, flags);
      return;
    }

  /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none);
}
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);

  if (bitmap_first_set_bit (call_clobbered_vars) >= 0)
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt);
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt);
    }
}
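
/* Added note, summarizing the logic above: a call to a 'const' function
   adds no virtual operands at all; a call to a 'pure' function adds only
   VUSEs of the call-clobbered variables; a call with side effects that is
   neither pure, const nor noreturn gets V_MAY_DEFs for them via
   add_call_clobber_ops.  */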
/* Add *VAR_P to the appropriate operand array for INFO.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, tree stmt, int flags)
{
  bool is_real_op;
  tree var, sym;
  stmt_ann_t s_ann = stmt_ann (stmt);
  var_ann_t v_ann;

  var = *var_p;

  /* If the operand is an ADDR_EXPR, add its operand to the list of
     variables that have had their address taken in this statement.  */
  if (TREE_CODE (var) == ADDR_EXPR)
    {
      note_addressable (TREE_OPERAND (var, 0), s_ann);
      return;
    }

  /* If the original variable is not a scalar, it will be added to the list
     of virtual operands.  In that case, use its base symbol as the virtual
     variable representing it.  */
  is_real_op = is_gimple_reg (var);
  if (!is_real_op && !DECL_P (var))
    var = get_virtual_var (var);

  /* If VAR is not a variable that we care to optimize, do nothing.  */
  if (var == NULL_TREE || !SSA_VAR_P (var))
    return;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Don't expose volatile variables to the optimizers.  */
  if (TREE_THIS_VOLATILE (sym))
    {
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    {
      varray_type aliases;

      /* The variable is not a GIMPLE register.  Add it (or its aliases) to
	 virtual operands, unless the caller has specifically requested
	 not to add virtual operands (used when adding operands inside an
	 ADDR_EXPR expression).  */
      if (flags & opf_no_vops)
	return;

      aliases = v_ann->may_aliases;

      if (aliases == NULL)
	{
	  /* The variable is not aliased or it is an alias tag.  */
	  if (flags & opf_is_def)
	    {
	      if (flags & opf_kill_def)
		{
		  /* Only regular variables may get a V_MUST_DEF
		     operand.  */
		  gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG);
		  /* V_MUST_DEF for non-aliased, non-GIMPLE register
		     variable definitions.  */
		  append_v_must_def (var);
		}
	      else
		/* Add a V_MAY_DEF for call-clobbered variables and
		   memory tags.  */
		append_v_may_def (var);
	    }
	  else
	    {
	      append_vuse (var);
	      if (s_ann && v_ann->is_alias_tag)
		s_ann->makes_aliased_loads = 1;
	    }
	}
      else
	{
	  size_t i;

	  /* The variable is aliased.  Add its aliases to the virtual
	     operands.  */
	  gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);

	  if (flags & opf_is_def)
	    {
	      /* If the variable is also an alias tag, add a virtual
		 operand for it, otherwise we will miss representing
		 references to the members of the variable's alias set.
		 This fixes the bug in gcc.c-torture/execute/20020503-1.c.  */
	      if (v_ann->is_alias_tag)
		append_v_may_def (var);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_v_may_def (VARRAY_TREE (aliases, i));

	      if (s_ann)
		s_ann->makes_aliased_stores = 1;
	    }
	  else
	    {
	      /* Similarly, append a virtual use for VAR itself, when
		 it is an alias tag.  */
	      if (v_ann->is_alias_tag)
		append_vuse (var);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_vuse (VARRAY_TREE (aliases, i));

	      if (s_ann)
		s_ann->makes_aliased_loads = 1;
	    }
	}
    }
}
/* Record that VAR had its address taken in the statement with annotations
   S_ANN.  */

static void
note_addressable (tree var, stmt_ann_t s_ann)
{
  if (!s_ann)
    return;

  var = get_base_address (var);
  if (var && SSA_VAR_P (var))
    {
      if (s_ann->addresses_taken == NULL)
	s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
      bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
    }
}
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt)
{
  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (stmt_ann (stmt))
    stmt_ann (stmt)->makes_clobbering_call = true;

  /* If we had created .GLOBAL_VAR earlier, use it.  Otherwise, add
     a V_MAY_DEF operand for every call clobbered variable.  See
     compute_may_aliases for the heuristic used to decide whether
     to create .GLOBAL_VAR or not.  */
  if (global_var)
    add_stmt_operand (&global_var, stmt, opf_is_def);
  else
    {
      size_t i;

      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
	{
	  tree var = referenced_var (i);

	  /* If VAR is read-only, don't add a V_MAY_DEF, just a
	     VUSE instead.  */
	  if (!TREE_READONLY (var))
	    add_stmt_operand (&var, stmt, opf_is_def);
	  else
	    add_stmt_operand (&var, stmt, opf_none);
	});
    }
}
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt)
{
  /* Otherwise, if the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  Otherwise, add a VUSE
     for each call-clobbered variable.  See add_referenced_var for the
     heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    add_stmt_operand (&global_var, stmt, opf_none);
  else
    {
      size_t i;

      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
	{
	  tree var = referenced_var (i);
	  add_stmt_operand (&var, stmt, opf_none);
	});
    }
}
/* Copies virtual operands from SRC to DST.  */

void
copy_virtual_operands (tree dst, tree src)
{
  unsigned i;
  vuse_optype vuses = STMT_VUSE_OPS (src);
  v_may_def_optype v_may_defs = STMT_V_MAY_DEF_OPS (src);
  v_must_def_optype v_must_defs = STMT_V_MUST_DEF_OPS (src);
  vuse_optype *vuses_new = &stmt_ann (dst)->operands.vuse_ops;
  v_may_def_optype *v_may_defs_new = &stmt_ann (dst)->operands.v_may_def_ops;
  v_must_def_optype *v_must_defs_new
    = &stmt_ann (dst)->operands.v_must_def_ops;

  if (vuses)
    {
      *vuses_new = allocate_vuse_optype (NUM_VUSES (vuses));
      for (i = 0; i < NUM_VUSES (vuses); i++)
	SET_VUSE_OP (*vuses_new, i, VUSE_OP (vuses, i));
    }

  if (v_may_defs)
    {
      *v_may_defs_new = allocate_v_may_def_optype (NUM_V_MAY_DEFS (v_may_defs));
      for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
	{
	  SET_V_MAY_DEF_OP (*v_may_defs_new, i, V_MAY_DEF_OP (v_may_defs, i));
	  SET_V_MAY_DEF_RESULT (*v_may_defs_new, i,
				V_MAY_DEF_RESULT (v_may_defs, i));
	}
    }

  if (v_must_defs)
    {
      *v_must_defs_new
	= allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs));
      for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
	SET_V_MUST_DEF_OP (*v_must_defs_new, i, V_MUST_DEF_OP (v_must_defs, i));
    }
}
/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this
   can be used to eliminate redundant loads.  OLD_OPS are the operands
   from the store stmt, and NEW_STMT is the new load which represents a
   load of the values stored.  */
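
/* Added example: for a store '*p_1 = x_2' whose operand cache contains
   'a_4 = V_MAY_DEF <a_3>', the artificial load built below receives
   'VUSE <a_4>', i.e. a use of the value just stored.  */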
void
create_ssa_artficial_load_stmt (stmt_operands_p old_ops, tree new_stmt)
{
  stmt_ann_t ann;
  tree op;
  stmt_operands_t tmp;
  unsigned j;

  memset (&tmp, 0, sizeof (stmt_operands_t));
  ann = get_stmt_ann (new_stmt);

  /* Free operands just in case it was an existing stmt.  */
  free_ssa_operands (&(ann->operands));

  build_ssa_operands (new_stmt, NULL, &tmp, &(ann->operands));
  free_vuses (&(ann->operands.vuse_ops));
  free_v_may_defs (&(ann->operands.v_may_def_ops));
  free_v_must_defs (&(ann->operands.v_must_def_ops));

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  for (j = 0; j < NUM_V_MAY_DEFS (old_ops->v_may_def_ops); j++)
    {
      op = V_MAY_DEF_RESULT (old_ops->v_may_def_ops, j);
      append_vuse (op);
    }

  for (j = 0; j < NUM_V_MUST_DEFS (old_ops->v_must_def_ops); j++)
    {
      op = V_MUST_DEF_OP (old_ops->v_must_def_ops, j);
      append_vuse (op);
    }

  /* Now set the vuses for this new stmt.  */
  ann->operands.vuse_ops = finalize_ssa_vuses (&(tmp.vuse_ops));
}
#include "gt-tree-ssa-operands.h"