/* SSA operands management for trees.
   Copyright (C) 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "coretypes.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt
   tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   get_stmt_operands() is the primary entry point.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_stmt_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
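
/* Illustrative sketch, not part of the original file: the typical way a
   pass consumes this cache, assuming the operand accessor macros
   (STMT_USE_OPS, NUM_USES, USE_OP) from tree-flow.h:

	use_optype uses;
	unsigned i;

	get_stmt_operands (stmt);
	uses = STMT_USE_OPS (stmt);
	for (i = 0; i < NUM_USES (uses); i++)
	  process_use (USE_OP (uses, i));

   where process_use stands for whatever the pass does with each operand.
   The def, vuse, v_may_def and v_must_def vectors are walked the same way
   through their corresponding macros.  */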
/* Flags to describe operand properties in get_stmt_operands and helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def	(1 << 2)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 1)
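
/* Example (illustrative): for 'a = b + c' the RHS is scanned with opf_none,
   yielding USE operands for 'b' and 'c', while the LHS is scanned with
   (opf_is_def | opf_kill_def), yielding a killing definition of 'a'.  A
   partial store such as 'a.x = b' passes opf_is_def alone, so a
   non-register 'a' receives a V_MAY_DEF rather than a V_MUST_DEF.  */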
/* Array for building all the def operands.  */
static GTY (()) varray_type build_defs;

/* Array for building all the use operands.  */
static GTY (()) varray_type build_uses;

/* Array for building all the v_may_def operands.  */
static GTY (()) varray_type build_v_may_defs;

/* Array for building all the vuse operands.  */
static GTY (()) varray_type build_vuses;

/* Array for building all the v_must_def operands.  */
static GTY (()) varray_type build_v_must_defs;
#ifdef ENABLE_CHECKING
/* Used to make sure operand construction is working on the proper stmt.  */
tree check_build_stmt;
#endif
static void note_addressable (tree, stmt_ann_t);
static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
static void append_v_may_def (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree);
static void add_call_read_ops (tree);
static void add_stmt_operand (tree *, tree, int);
/* Return a vector of contiguous memory for NUM def operands.  */

static inline def_optype
allocate_def_optype (unsigned num)
{
  def_optype def_ops;
  unsigned size;

  size = sizeof (struct def_optype_d) + sizeof (tree *) * (num - 1);
  def_ops = ggc_alloc (size);
  def_ops->num_defs = num;
  return def_ops;
}
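
/* Note: these allocators rely on the classic C trailing-array idiom.  Each
   *_optype_d struct is presumably declared with a one-element trailing
   array, so 'num - 1' extra elements are tacked onto a single ggc_alloc'd
   block, keeping each operand vector contiguous in GC'd memory.  */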
/* Return a vector of contiguous memory for NUM use operands.  */

static inline use_optype
allocate_use_optype (unsigned num)
{
  use_optype use_ops;
  unsigned size;

  size = sizeof (struct use_optype_d) + sizeof (tree *) * (num - 1);
  use_ops = ggc_alloc (size);
  use_ops->num_uses = num;
  return use_ops;
}
/* Return a vector of contiguous memory for NUM v_may_def operands.  */

static inline v_may_def_optype
allocate_v_may_def_optype (unsigned num)
{
  v_may_def_optype v_may_def_ops;
  unsigned size;

  size = sizeof (struct v_may_def_optype_d)
	 + sizeof (v_may_def_operand_type_t) * (num - 1);
  v_may_def_ops = ggc_alloc (size);
  v_may_def_ops->num_v_may_defs = num;
  return v_may_def_ops;
}
/* Return a vector of contiguous memory for NUM vuse operands.  */

static inline vuse_optype
allocate_vuse_optype (unsigned num)
{
  vuse_optype vuse_ops;
  unsigned size;

  size = sizeof (struct vuse_optype_d) + sizeof (tree) * (num - 1);
  vuse_ops = ggc_alloc (size);
  vuse_ops->num_vuses = num;
  return vuse_ops;
}
/* Return a vector of contiguous memory for NUM v_must_def operands.  */

static inline v_must_def_optype
allocate_v_must_def_optype (unsigned num)
{
  v_must_def_optype v_must_def_ops;
  unsigned size;

  size = sizeof (struct v_must_def_optype_d) + sizeof (tree) * (num - 1);
  v_must_def_ops = ggc_alloc (size);
  v_must_def_ops->num_v_must_defs = num;
  return v_must_def_ops;
}
/* Free memory for USES.  */

void
free_uses (use_optype *uses)
{
  if (*uses)
    {
      ggc_free (*uses);
      *uses = NULL;
    }
}

/* Free memory for DEFS.  */

void
free_defs (def_optype *defs)
{
  if (*defs)
    {
      ggc_free (*defs);
      *defs = NULL;
    }
}

/* Free memory for VUSES.  */

void
free_vuses (vuse_optype *vuses)
{
  if (*vuses)
    {
      ggc_free (*vuses);
      *vuses = NULL;
    }
}

/* Free memory for V_MAY_DEFS.  */

void
free_v_may_defs (v_may_def_optype *v_may_defs)
{
  if (*v_may_defs)
    {
      ggc_free (*v_may_defs);
      *v_may_defs = NULL;
    }
}

/* Free memory for V_MUST_DEFS.  */

void
free_v_must_defs (v_must_def_optype *v_must_defs)
{
  if (*v_must_defs)
    {
      ggc_free (*v_must_defs);
      *v_must_defs = NULL;
    }
}
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  VARRAY_TREE_PTR_INIT (build_defs, 5, "build defs");
  VARRAY_TREE_PTR_INIT (build_uses, 10, "build uses");
  VARRAY_TREE_INIT (build_v_may_defs, 10, "build v_may_defs");
  VARRAY_TREE_INIT (build_vuses, 10, "build vuses");
  VARRAY_TREE_INIT (build_v_must_defs, 10, "build v_must_defs");
}
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
}
/* All the finalize_ssa_* routines do the work required to turn the build_*
   VARRAYs into an operand vector of the appropriate type.  The original
   vector, if any, is passed in for comparison and virtual SSA_NAME reuse.
   If the old vector is reused, the pointer passed in is set to NULL so that
   the memory is not freed when the old operands are freed.  */

/* Return a new def operand vector for STMT, comparing to OLD_OPS_P.  */

static def_optype
finalize_ssa_defs (def_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x;
  def_optype def_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_defs);
  if (num == 0)
    return NULL;

#ifdef ENABLE_CHECKING
  /* There should only be a single real definition per assignment.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
    abort ();
#endif

  old_ops = *old_ops_p;

  /* Compare old vector and new array.  */
  build_diff = true;
  if (old_ops && old_ops->num_defs == num)
    {
      build_diff = false;
      for (x = 0; x < num; x++)
        if (old_ops->defs[x].def != VARRAY_TREE_PTR (build_defs, x))
          {
            build_diff = true;
            break;
          }
    }

  if (!build_diff)
    {
      /* Perfect match; reuse the old vector and keep its memory from
         being freed by clearing the caller's pointer.  */
      def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      def_ops = allocate_def_optype (num);
      for (x = 0; x < num; x++)
        def_ops->defs[x].def = VARRAY_TREE_PTR (build_defs, x);
    }

  VARRAY_POP_ALL (build_defs);

  return def_ops;
}
/* Return a new use operand vector for STMT, comparing to OLD_OPS_P.  */

static use_optype
finalize_ssa_uses (use_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x;
  use_optype use_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_uses);
  if (num == 0)
    return NULL;

#ifdef ENABLE_CHECKING
  /* If the pointer to the operand is the statement itself, something is
     wrong.  It means that we are pointing to a local variable (the
     initial call to get_stmt_operands does not pass a pointer to a
     local variable).  */
  for (x = 0; x < num; x++)
    if (*(VARRAY_TREE_PTR (build_uses, x)) == stmt)
      abort ();
#endif

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  build_diff = true;
  if (old_ops && old_ops->num_uses == num)
    {
      build_diff = false;
      for (x = 0; x < num; x++)
        if (old_ops->uses[x].use != VARRAY_TREE_PTR (build_uses, x))
          {
            build_diff = true;
            break;
          }
    }

  if (!build_diff)
    {
      use_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      use_ops = allocate_use_optype (num);
      for (x = 0; x < num; x++)
        use_ops->uses[x].use = VARRAY_TREE_PTR (build_uses, x);
    }

  VARRAY_POP_ALL (build_uses);

  return use_ops;
}
/* Return a new v_may_def operand vector, comparing to OLD_OPS_P.  */

static v_may_def_optype
finalize_ssa_v_may_defs (v_may_def_optype *old_ops_p)
{
  unsigned num, x, i, old_num;
  v_may_def_optype v_may_def_ops, old_ops;
  tree result, var;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
  if (num == 0)
    return NULL;

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  build_diff = true;
  if (old_ops && old_ops->num_v_may_defs == num)
    {
      build_diff = false;
      for (x = 0; x < num; x++)
        {
          var = old_ops->v_may_defs[x].def;
          if (TREE_CODE (var) == SSA_NAME)
            var = SSA_NAME_VAR (var);
          if (var != VARRAY_TREE (build_v_may_defs, x))
            {
              build_diff = true;
              break;
            }
        }
    }

  old_num = (old_ops ? old_ops->num_v_may_defs : 0);

  if (!build_diff)
    {
      v_may_def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      v_may_def_ops = allocate_v_may_def_optype (num);
      for (x = 0; x < num; x++)
        {
          var = VARRAY_TREE (build_v_may_defs, x);
          /* Look for VAR in the old operands vector.  */
          for (i = 0; i < old_num; i++)
            {
              result = old_ops->v_may_defs[i].def;
              if (TREE_CODE (result) == SSA_NAME)
                result = SSA_NAME_VAR (result);
              if (result == var)
                {
                  /* Reuse the old entry so its SSA_NAME is preserved.  */
                  v_may_def_ops->v_may_defs[x] = old_ops->v_may_defs[i];
                  break;
                }
            }
          if (i == old_num)
            {
              v_may_def_ops->v_may_defs[x].def = var;
              v_may_def_ops->v_may_defs[x].use = var;
            }
        }
    }

  /* The V_MAY_DEF build vector is not emptied here; it is still needed by
     finalize_ssa_vuses, which empties it after the VUSES have been
     processed.  */

  return v_may_def_ops;
}
/* Return a new vuse operand vector, comparing to OLD_OPS_P.  */

static vuse_optype
finalize_ssa_vuses (vuse_optype *old_ops_p)
{
  unsigned num, x, i, num_v_may_defs, old_num;
  vuse_optype vuse_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  if (num == 0)
    {
      VARRAY_POP_ALL (build_v_may_defs);
      return NULL;
    }

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  num_v_may_defs = VARRAY_ACTIVE_SIZE (build_v_may_defs);

  if (num_v_may_defs > 0)
    {
      size_t j;
      tree vuse;

      for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
        {
          vuse = VARRAY_TREE (build_vuses, i);
          for (j = 0; j < num_v_may_defs; j++)
            {
              if (vuse == VARRAY_TREE (build_v_may_defs, j))
                break;
            }

          /* If we found a useless VUSE operand, remove it from the
             operand array by replacing it with the last active element
             in the operand array (unless the useless VUSE was the
             last operand, in which case we simply remove it).  */
          if (j != num_v_may_defs)
            {
              if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
                {
                  VARRAY_TREE (build_vuses, i)
                    = VARRAY_TREE (build_vuses,
                                   VARRAY_ACTIVE_SIZE (build_vuses) - 1);
                }
              VARRAY_POP (build_vuses);

              /* We want to rescan the element at this index, unless
                 this was the last element, in which case the loop
                 terminates.  */
              i--;
            }
        }
    }

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  /* We could have reduced the size to zero now, however.  */
  if (num == 0)
    {
      VARRAY_POP_ALL (build_v_may_defs);
      return NULL;
    }

  old_ops = *old_ops_p;

  /* Determine whether the new VUSEs are the same as the old vector.  */
  build_diff = true;
  if (old_ops && old_ops->num_vuses == num)
    {
      build_diff = false;
      for (x = 0; x < num; x++)
        {
          tree v = old_ops->vuses[x];
          if (TREE_CODE (v) == SSA_NAME)
            v = SSA_NAME_VAR (v);
          if (v != VARRAY_TREE (build_vuses, x))
            {
              build_diff = true;
              break;
            }
        }
    }

  old_num = (old_ops ? old_ops->num_vuses : 0);

  if (!build_diff)
    {
      vuse_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      vuse_ops = allocate_vuse_optype (num);
      for (x = 0; x < num; x++)
        {
          tree result, var = VARRAY_TREE (build_vuses, x);

          /* Look for VAR in the old vector, and use that SSA_NAME.  */
          for (i = 0; i < old_num; i++)
            {
              result = old_ops->vuses[i];
              if (TREE_CODE (result) == SSA_NAME)
                result = SSA_NAME_VAR (result);
              if (result == var)
                {
                  vuse_ops->vuses[x] = old_ops->vuses[i];
                  break;
                }
            }
          if (i == old_num)
            vuse_ops->vuses[x] = var;
        }
    }

  /* The v_may_def build vector wasn't freed because we needed it here.
     Free it now with the vuses build vector.  */
  VARRAY_POP_ALL (build_vuses);
  VARRAY_POP_ALL (build_v_may_defs);

  return vuse_ops;
}
/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P.  */

static v_must_def_optype
finalize_ssa_v_must_defs (v_must_def_optype *old_ops_p,
			  tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x, i, old_num = 0;
  v_must_def_optype v_must_def_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_v_must_defs);
  if (num == 0)
    return NULL;

#ifdef ENABLE_CHECKING
  /* There should only be a single V_MUST_DEF per assignment.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
    abort ();
#endif

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  build_diff = true;
  if (old_ops && old_ops->num_v_must_defs == num)
    {
      build_diff = false;
      for (x = 0; x < num; x++)
        {
          tree var = old_ops->v_must_defs[x];
          if (TREE_CODE (var) == SSA_NAME)
            var = SSA_NAME_VAR (var);
          if (var != VARRAY_TREE (build_v_must_defs, x))
            {
              build_diff = true;
              break;
            }
        }
    }

  old_num = (old_ops ? old_ops->num_v_must_defs : 0);

  if (!build_diff)
    {
      v_must_def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      v_must_def_ops = allocate_v_must_def_optype (num);
      for (x = 0; x < num; x++)
        {
          tree result, var = VARRAY_TREE (build_v_must_defs, x);

          /* Look for VAR in the original vector.  */
          for (i = 0; i < old_num; i++)
            {
              result = old_ops->v_must_defs[i];
              if (TREE_CODE (result) == SSA_NAME)
                result = SSA_NAME_VAR (result);
              if (result == var)
                {
                  v_must_def_ops->v_must_defs[x] = old_ops->v_must_defs[i];
                  break;
                }
            }
          if (i == old_num)
            v_must_def_ops->v_must_defs[x] = var;
        }
    }

  VARRAY_POP_ALL (build_v_must_defs);

  return v_must_def_ops;
}
/* Finalize all the build vectors, fill the new ones into NEW_OPS.  */

static void
finalize_ssa_stmt_operands (tree stmt, stmt_operands_p old_ops,
			    stmt_operands_p new_ops)
{
  new_ops->def_ops = finalize_ssa_defs (&(old_ops->def_ops), stmt);
  new_ops->use_ops = finalize_ssa_uses (&(old_ops->use_ops), stmt);
  new_ops->v_must_def_ops
    = finalize_ssa_v_must_defs (&(old_ops->v_must_def_ops), stmt);
  new_ops->v_may_def_ops = finalize_ssa_v_may_defs (&(old_ops->v_may_def_ops));
  new_ops->vuse_ops = finalize_ssa_vuses (&(old_ops->vuse_ops));
}
/* Start the process of building up operand vectors.  */

static void
start_ssa_stmt_operands (void)
{
#ifdef ENABLE_CHECKING
  /* The build_* vectors must be empty before parsing a new stmt.  */
  if (VARRAY_ACTIVE_SIZE (build_defs) > 0
      || VARRAY_ACTIVE_SIZE (build_uses) > 0
      || VARRAY_ACTIVE_SIZE (build_vuses) > 0
      || VARRAY_ACTIVE_SIZE (build_v_may_defs) > 0
      || VARRAY_ACTIVE_SIZE (build_v_must_defs) > 0)
    abort ();
#endif
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VARRAY_PUSH_TREE_PTR (build_defs, def_p);
}
/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VARRAY_PUSH_TREE_PTR (build_uses, use_p);
}
/* Add a new virtual may def for variable VAR to the build array.  */

static void
append_v_may_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
    if (var == VARRAY_TREE (build_v_may_defs, i))
      return;

  VARRAY_PUSH_TREE (build_v_may_defs, var);
}
/* Add VAR to the list of virtual uses.  */

static void
append_vuse (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
    if (var == VARRAY_TREE (build_vuses, i))
      return;

  VARRAY_PUSH_TREE (build_vuses, var);
}
/* Add VAR to the list of virtual must definitions.  */

static void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_must_defs); i++)
    if (var == VARRAY_TREE (build_v_must_defs, i))
      return;

  VARRAY_PUSH_TREE (build_v_must_defs, var);
}
/* Create an operands cache for STMT, returning it in NEW_OPS.  OLD_OPS are
   the original operands, and if ANN is non-null, appropriate stmt flags are
   set in the stmt's annotation.  Note that some fields in old_ops may
   change to NULL, although none of the memory they originally pointed to
   will be destroyed.  It is appropriate to call free_ssa_operands() on
   the value returned in old_ops.

   The rationale for this: certain optimizations wish to examine the
   difference between new_ops and old_ops after processing.  If a set of
   operands doesn't change, new_ops will simply assume the pointer in
   old_ops, and the old_ops pointer will be set to NULL, indicating no
   memory needs to be cleared.  Usage might appear something like:

       old_ops_copy = old_ops = stmt_ann(stmt)->operands;
       build_ssa_operands (stmt, NULL, &old_ops, &new_ops);
          <* compare old_ops_copy and new_ops *>
       free_ssa_operands (old_ops);  */

void
build_ssa_operands (tree stmt, stmt_ann_t ann, stmt_operands_p old_ops,
		    stmt_operands_p new_ops)
{
  enum tree_code code;
  tree_ann_t saved_ann = stmt->common.ann;

  /* Replace stmt's annotation with the one passed in for the duration
     of the operand building process.  This allows "fake" stmts to be built
     and not be included in other data structures which can be built here.  */
  stmt->common.ann = (tree_ann_t) ann;

  /* Initially assume that the statement has no volatile operands, nor
     makes aliased loads or stores.  */
  if (ann)
    {
      ann->has_volatile_ops = false;
      ann->makes_aliased_stores = false;
      ann->makes_aliased_loads = false;
    }

  start_ssa_stmt_operands ();

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_RANGE_REF
          || TREE_CODE (TREE_OPERAND (stmt, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (stmt, 0)) == REALPART_EXPR
          || TREE_CODE (TREE_OPERAND (stmt, 0)) == IMAGPART_EXPR
          /* Use a V_MAY_DEF if the RHS might throw, as the LHS won't be
             modified in that case.  FIXME we should represent somehow
             that it is killed on the fallthrough path.  */
          || tree_could_throw_p (TREE_OPERAND (stmt, 1)))
        get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_is_def);
      else
        get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
                           opf_is_def | opf_kill_def);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

      /* These nodes contain no variable references.  */
    case CASE_LABEL_EXPR:
    case TRY_FINALLY_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
         pointer (which may only happen for USE operands), we will abort in
         append_use.  This default will handle statements like empty
         statements, or CALL_EXPRs that may appear on the RHS of a statement
         or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }

  finalize_ssa_stmt_operands (stmt, old_ops, new_ops);
  stmt->common.ann = saved_ann;
}
/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  if (ops->def_ops)
    free_defs (&(ops->def_ops));
  if (ops->use_ops)
    free_uses (&(ops->use_ops));
  if (ops->vuse_ops)
    free_vuses (&(ops->vuse_ops));
  if (ops->v_may_def_ops)
    free_v_may_defs (&(ops->v_may_def_ops));
  if (ops->v_must_def_ops)
    free_v_must_defs (&(ops->v_must_def_ops));
}
/* Get the operands of statement STMT.  Note that repeated calls to
   get_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to modify_stmt().  */

void
get_stmt_operands (tree stmt)
{
  stmt_ann_t ann;
  stmt_operands_t old_operands;

#if defined ENABLE_CHECKING
  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  if (SSA_VAR_P (stmt))
    abort ();
#endif

  /* Ignore error statements.  */
  if (TREE_CODE (stmt) == ERROR_MARK)
    return;

  ann = get_stmt_ann (stmt);

  /* If the statement has not been modified, the operands are still valid.  */
  if (!ann->modified)
    return;

  timevar_push (TV_TREE_OPS);

  old_operands = ann->operands;
  memset (&(ann->operands), 0, sizeof (stmt_operands_t));

  build_ssa_operands (stmt, ann, &old_operands, &(ann->operands));
  free_ssa_operands (&old_operands);

  /* Clear the modified bit for STMT.  Subsequent calls to
     get_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to modify_stmt().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
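
/* Illustrative usage, not part of the original file: a pass that rewrites
   a statement in place is expected to invalidate the cached operands and
   then rebuild them, assuming modify_stmt from tree-flow-inline.h:

	TREE_OPERAND (stmt, 1) = new_rhs;
	modify_stmt (stmt);
	get_stmt_operands (stmt);

   modify_stmt marks the cache stale; the subsequent get_stmt_operands call
   rebuilds it (new_rhs being whatever GIMPLE value the pass computed).  */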
/* Recursively scan the expression pointed to by EXPR_P in statement STMT.
   FLAGS is one of the OPF_* constants modifying how to interpret the
   operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  char class;
  tree expr = *expr_p;

  if (expr == NULL || expr == error_mark_node)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* We could have the address of a component, array member,
         etc which has interesting variable references.  */
      /* Taking the address of a variable does not represent a
         reference to it, but the fact that the stmt takes its address will
         be of interest to some passes (e.g. alias resolution).  */
      add_stmt_operand (expr_p, stmt, 0);

      /* If the address is invariant, there may be no interesting variable
         references inside.  */
      if (is_gimple_min_invariant (expr))
        return;

      /* There should be no VUSEs created, since the referenced objects are
         not really accessed.  The only operands that we should find here
         are ARRAY_REF indices which will always be real operands (GIMPLE
         does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* If we found a variable, add it to DEFS or USES depending
         on the operand flags.  */
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
         representing the array.  The virtual variable for an ARRAY_REF
         is the VAR_DECL for the array.  */

      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
         according to the value of FLAGS.  Recurse if the LHS of the
         ARRAY_REF node is not a regular variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
        add_stmt_operand (expr_p, stmt, flags);
      else
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
      return;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Similarly to arrays, references to compound variables (complex
         types and structures/unions) are globbed.

         FIXME: This means that a sequence of partial stores, e.g. stores
         to two different fields of 'a' followed by reads of those fields,
         will not be constant propagated because the two partial
         definitions to 'a' will kill each other.  Note that SRA may be
         able to fix this problem if 'a' can be scalarized.  */

      /* If the LHS of the compound reference is not a regular variable,
         recurse to keep looking for more operands in the subexpression.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
        add_stmt_operand (expr_p, stmt, flags);
      else
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      if (code == COMPONENT_REF)
        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
         and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case MODIFY_EXPR:
      {
        int subflags;
        tree op;

        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

        op = TREE_OPERAND (expr, 0);
        if (TREE_CODE (op) == WITH_SIZE_EXPR)
          op = TREE_OPERAND (op, 0);
        if (TREE_CODE (op) == ARRAY_REF
            || TREE_CODE (op) == ARRAY_RANGE_REF
            || TREE_CODE (op) == COMPONENT_REF
            || TREE_CODE (op) == REALPART_EXPR
            || TREE_CODE (op) == IMAGPART_EXPR)
          subflags = opf_is_def;
        else
          subflags = opf_is_def | opf_kill_def;

        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
        return;
      }

    case CONSTRUCTOR:
      {
        /* General aggregate CONSTRUCTORs have been decomposed, but they
           are still in use as the COMPLEX_EXPR equivalent for vectors.  */
        tree t;

        for (t = TREE_OPERAND (expr, 0); t; t = TREE_CHAIN (t))
          get_expr_operands (stmt, &TREE_VALUE (t), opf_none);

        return;
      }

    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_XOR_EXPR:
    do_binary:
      {
        tree op0 = TREE_OPERAND (expr, 0);
        tree op1 = TREE_OPERAND (expr, 1);

        /* If it would be profitable to swap the operands, then do so to
           canonicalize the statement, enabling better optimization.

           By placing canonicalization of such expressions here we
           transparently keep statements in canonical form, even
           when the statement is modified.  */
        if (tree_swap_operands_p (op0, op1, false))
          {
            /* For relationals we need to swap the operands
               and change the code.  */
            if (class == '<')
              {
                TREE_SET_CODE (expr, swap_tree_comparison (code));
                TREE_OPERAND (expr, 0) = op1;
                TREE_OPERAND (expr, 1) = op0;
              }
            /* For a commutative operator we can just swap the operands.  */
            else if (commutative_tree_code (code))
              {
                TREE_OPERAND (expr, 0) = op1;
                TREE_OPERAND (expr, 1) = op0;
              }
          }

        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        return;
      }

    default:
      if (class == '1')
        goto do_unary;
      if (class == '2' || class == '<')
        goto do_binary;
      if (class == 'c' || class == 't')
        /* Expressions that make no memory references.  */
        return;
    }

  /* If we get here, something has gone wrong.  */
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  abort ();
}
/* Scan operands in STMT, an ASM_EXPR.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
        = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
                               &allows_mem, &allows_reg, &is_inout);

#if defined ENABLE_CHECKING
      /* This should have been split in gimplify_asm_expr.  */
      if (allows_reg && is_inout)
        abort ();
#endif

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t))
            note_addressable (t, s_ann);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
        = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t))
            note_addressable (t, s_ann);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
        size_t i;

        /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
           decided to group them).  */
        if (global_var)
          add_stmt_operand (&global_var, stmt, opf_is_def);
        else
          EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
            {
              tree var = referenced_var (i);
              add_stmt_operand (&var, stmt, opf_is_def);
            });

        /* Now clobber all addressables.  */
        EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i,
          {
            tree var = referenced_var (i);
            add_stmt_operand (&var, stmt, opf_is_def);
          });

        break;
      }
}
/* A subroutine of get_expr_operands to handle INDIRECT_REF.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t ann = stmt_ann (stmt);

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
          && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
          && pi->name_mem_tag)
        {
          /* PTR has its own memory tag.  Use it.  */
          add_stmt_operand (&pi->name_mem_tag, stmt, flags);
        }
      else
        {
          /* If PTR is not an SSA_NAME or it doesn't have a name
             tag, use its type memory tag.  */
          var_ann_t ann;

          /* If we are emitting debugging dumps, display a warning if
             PTR is an SSA_NAME with no flow-sensitive alias
             information.  That means that we may need to compute
             aliasing again.  */
          if (dump_file
              && TREE_CODE (ptr) == SSA_NAME
              && pi == NULL)
            {
              fprintf (dump_file,
                       "NOTE: no flow-sensitive alias info for ");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, " in ");
              print_generic_stmt (dump_file, stmt, dump_flags);
            }

          if (TREE_CODE (ptr) == SSA_NAME)
            ptr = SSA_NAME_VAR (ptr);
          ann = var_ann (ptr);
          if (ann->type_mem_tag)
            add_stmt_operand (&ann->type_mem_tag, stmt, flags);
        }
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      if (ann)
        ann->has_volatile_ops = true;
      return;
    }

  /* Everything else *should* have been folded elsewhere, but users
     are smarter than we in finding ways to write invalid code.  We
     cannot just abort here.  If we were absolutely certain that we
     do handle all valid cases, then we could just do nothing here.
     That seems optimistic, so attempt to do something logical...  */
  else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
           && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
           && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
    {
      /* Make sure we know the object is addressable.  */
      pptr = &TREE_OPERAND (ptr, 0);
      add_stmt_operand (pptr, stmt, 0);

      /* Mark the object itself with a VUSE.  */
      pptr = &TREE_OPERAND (*pptr, 0);
      get_expr_operands (stmt, pptr, flags);
      return;
    }

  /* Ok, this isn't even is_gimple_min_invariant.  Something's broken.  */
  else
    abort ();

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none);
}
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);

  if (bitmap_first_set_bit (call_clobbered_vars) >= 0)
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
         A 'noreturn' function might, but since we don't return anyway
         there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
          && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
        add_call_clobber_ops (stmt);
      else if (!(call_flags & (ECF_CONST | ECF_NORETURN)))
        add_call_read_ops (stmt);
    }
}
/* Add *VAR_P to the appropriate operand array of STMT.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to the virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, tree stmt, int flags)
{
  bool is_real_op;
  tree var, sym;
  stmt_ann_t s_ann = stmt_ann (stmt);
  var_ann_t v_ann;

  var = *var_p;

  /* If the operand is an ADDR_EXPR, add its operand to the list of
     variables that have had their address taken in this statement.  */
  if (TREE_CODE (var) == ADDR_EXPR)
    {
      note_addressable (TREE_OPERAND (var, 0), s_ann);
      return;
    }

  /* If the original variable is not a scalar, it will be added to the list
     of virtual operands.  In that case, use its base symbol as the virtual
     variable representing it.  */
  is_real_op = is_gimple_reg (var);
  if (!is_real_op && !DECL_P (var))
    var = get_virtual_var (var);

  /* If VAR is not a variable that we care to optimize, do nothing.  */
  if (var == NULL_TREE || !SSA_VAR_P (var))
    return;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Don't expose volatile variables to the optimizers.  */
  if (TREE_THIS_VOLATILE (sym))
    {
      if (s_ann)
        s_ann->has_volatile_ops = true;
      return;
    }

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
        append_def (var_p);
      else
        append_use (var_p);
    }
  else
    {
      varray_type aliases;

      /* The variable is not a GIMPLE register.  Add it (or its aliases) to
         virtual operands, unless the caller has specifically requested
         not to add virtual operands (used when adding operands inside an
         ADDR_EXPR expression).  */
      if (flags & opf_no_vops)
        return;

      aliases = v_ann->may_aliases;

      if (aliases == NULL)
        {
          /* The variable is not aliased or it is an alias tag.  */
          if (flags & opf_is_def)
            {
              if (v_ann->is_alias_tag)
                {
                  /* Alias tagged vars get a regular V_MAY_DEF.  */
                  if (s_ann)
                    s_ann->makes_aliased_stores = 1;
                  append_v_may_def (var);
                }
              else if ((flags & opf_kill_def)
                       && v_ann->mem_tag_kind == NOT_A_TAG)
                /* V_MUST_DEF for non-aliased non-GIMPLE register
                   variable definitions.  Avoid memory tags.  */
                append_v_must_def (var);
              else
                /* Call-clobbered variables and memory tags get a
                   V_MAY_DEF.  */
                append_v_may_def (var);
            }
          else
            {
              append_vuse (var);
              if (s_ann && v_ann->is_alias_tag)
                s_ann->makes_aliased_loads = 1;
            }
        }
      else
        {
          size_t i;

          /* The variable is aliased.  Add its aliases to the virtual
             operands.  */
#if defined ENABLE_CHECKING
          if (VARRAY_ACTIVE_SIZE (aliases) == 0)
            abort ();
#endif

          if (flags & opf_is_def)
            {
              /* If the variable is also an alias tag, add a virtual
                 operand for it, otherwise we will miss representing
                 references to the members of the variable's alias set.
                 This fixes the bug in gcc.c-torture/execute/20020503-1.c.  */
              if (v_ann->is_alias_tag)
                append_v_may_def (var);

              for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
                append_v_may_def (VARRAY_TREE (aliases, i));

              if (s_ann)
                s_ann->makes_aliased_stores = 1;
            }
          else
            {
              if (v_ann->is_alias_tag)
                append_vuse (var);

              for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
                append_vuse (VARRAY_TREE (aliases, i));

              if (s_ann)
                s_ann->makes_aliased_loads = 1;
            }
        }
    }
}
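
/* Worked example (illustrative): given a store to '*p' where '*p' is
   represented by a type memory tag TMT whose may-aliases are {a, b}, this
   function is reached with VAR_P pointing to TMT and opf_is_def set.  TMT
   is not a GIMPLE register and has aliases, so the aliased branch appends
   a V_MAY_DEF for 'a' and for 'b' (plus one for TMT itself when it is an
   alias tag) and sets makes_aliased_stores on the statement.  */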
/* Record that VAR had its address taken in the statement with annotation
   S_ANN.  */

static void
note_addressable (tree var, stmt_ann_t s_ann)
{
  if (!s_ann)
    return;

  var = get_base_address (var);
  if (var && SSA_VAR_P (var))
    {
      if (s_ann->addresses_taken == NULL)
        s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
      bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
    }
}
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt)
{
  /* Functions that are not const, pure, or noreturn may clobber
     call-clobbered variables.  */
  if (stmt_ann (stmt))
    stmt_ann (stmt)->makes_clobbering_call = true;

  /* If we had created .GLOBAL_VAR earlier, use it.  Otherwise, add
     a V_MAY_DEF operand for every call clobbered variable.  See
     compute_may_aliases for the heuristic used to decide whether
     to create .GLOBAL_VAR or not.  */
  if (global_var)
    add_stmt_operand (&global_var, stmt, opf_is_def);
  else
    {
      size_t i;

      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
        {
          tree var = referenced_var (i);

          /* If VAR is read-only, don't add a V_MAY_DEF, just a
             VUSE operand.  */
          if (!TREE_READONLY (var))
            add_stmt_operand (&var, stmt, opf_is_def);
          else
            add_stmt_operand (&var, stmt, opf_none);
        });
    }
}
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt)
{
  /* If the function is not pure, it may reference memory.  Add a VUSE
     for .GLOBAL_VAR if it has been created.  Otherwise, add a VUSE
     for each call-clobbered variable.  See add_referenced_var for the
     heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    add_stmt_operand (&global_var, stmt, opf_none);
  else
    {
      size_t i;

      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
        {
          tree var = referenced_var (i);
          add_stmt_operand (&var, stmt, opf_none);
        });
    }
}
/* Copies virtual operands from SRC to DST.  */

void
copy_virtual_operands (tree dst, tree src)
{
  unsigned i;
  vuse_optype vuses = STMT_VUSE_OPS (src);
  v_may_def_optype v_may_defs = STMT_V_MAY_DEF_OPS (src);
  v_must_def_optype v_must_defs = STMT_V_MUST_DEF_OPS (src);
  vuse_optype *vuses_new = &stmt_ann (dst)->operands.vuse_ops;
  v_may_def_optype *v_may_defs_new = &stmt_ann (dst)->operands.v_may_def_ops;
  v_must_def_optype *v_must_defs_new
    = &stmt_ann (dst)->operands.v_must_def_ops;

  if (vuses)
    {
      *vuses_new = allocate_vuse_optype (NUM_VUSES (vuses));
      for (i = 0; i < NUM_VUSES (vuses); i++)
        SET_VUSE_OP (*vuses_new, i, VUSE_OP (vuses, i));
    }

  if (v_may_defs)
    {
      *v_may_defs_new
        = allocate_v_may_def_optype (NUM_V_MAY_DEFS (v_may_defs));
      for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
        {
          SET_V_MAY_DEF_OP (*v_may_defs_new, i, V_MAY_DEF_OP (v_may_defs, i));
          SET_V_MAY_DEF_RESULT (*v_may_defs_new, i,
                                V_MAY_DEF_RESULT (v_may_defs, i));
        }
    }

  if (v_must_defs)
    {
      *v_must_defs_new
        = allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs));
      for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
        SET_V_MUST_DEF_OP (*v_must_defs_new, i,
                           V_MUST_DEF_OP (v_must_defs, i));
    }
}
/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store, this
   can be used to eliminate redundant loads.  OLD_OPS are the operands from
   the store stmt, and NEW_STMT is the new load which represents a load of
   the values stored.  */

void
create_ssa_artficial_load_stmt (stmt_operands_p old_ops, tree new_stmt)
{
  stmt_ann_t ann;
  tree op;
  stmt_operands_t tmp;
  unsigned j;

  memset (&tmp, 0, sizeof (stmt_operands_t));
  ann = get_stmt_ann (new_stmt);

  /* Free operands just in case it was an existing stmt.  */
  free_ssa_operands (&(ann->operands));

  build_ssa_operands (new_stmt, NULL, &tmp, &(ann->operands));
  free_vuses (&(ann->operands.vuse_ops));
  free_v_may_defs (&(ann->operands.v_may_def_ops));
  free_v_must_defs (&(ann->operands.v_must_def_ops));

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  for (j = 0; j < NUM_V_MAY_DEFS (old_ops->v_may_def_ops); j++)
    {
      op = V_MAY_DEF_RESULT (old_ops->v_may_def_ops, j);
      append_vuse (op);
    }

  for (j = 0; j < NUM_V_MUST_DEFS (old_ops->v_must_def_ops); j++)
    {
      op = V_MUST_DEF_OP (old_ops->v_must_def_ops, j);
      append_vuse (op);
    }

  /* Now set the vuses for this new stmt.  */
  ann->operands.vuse_ops = finalize_ssa_vuses (&(tmp.vuse_ops));
}
#include "gt-tree-ssa-operands.h"