/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
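
/* Illustrative sketch (an editorial addition, not part of the original
   file): once the cache is built, passes typically walk it with the
   iterator macros used later in this file (FOR_EACH_SSA_TREE_OPERAND and
   the SSA_OP_* filters).  The function name example_print_operands is
   hypothetical.  */
#if 0
static void
example_print_operands (tree stmt)
{
  ssa_op_iter iter;
  tree var;

  /* Visit every operand, real and virtual, cached for STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_OPERANDS)
    print_generic_expr (stderr, var, TDF_SLIM);
}
#endif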
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def	(1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 2)

/* Operand is a "non-specific" kill for call-clobbers and such.  This is used
   to distinguish "reset the world" events from explicit MODIFY_EXPRs.  */
#define opf_non_specific  (1 << 3)
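
/* Worked example (an editorial addition): for the scalar assignment
   'a = b + c', the LHS walk uses opf_is_def | opf_kill_def, yielding a
   killing definition of 'a'.  For a partial store such as 'a.b = x', the
   opf_kill_def bit is stripped during the walk because only part of 'a'
   is modified, so 'a' receives a V_MAY_DEF rather than a V_MUST_DEF.  */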
/* This structure maintains a sorted list of operands which is created by
   parse_ssa_operands.  */
struct opbuild_list_d GTY (())
{
  varray_type vars;	/* The VAR_DECLS tree.  */
  varray_type uid;	/* The sort value for virtual symbols.  */
  varray_type next;	/* The next index in the sorted list.  */
  int first;		/* First element in list.  */
  unsigned num;		/* Number of elements.  */
};

#define OPBUILD_LAST	-1
/* Array for building all the def operands.  */
static GTY (()) struct opbuild_list_d build_defs;

/* Array for building all the use operands.  */
static GTY (()) struct opbuild_list_d build_uses;

/* Array for building all the v_may_def operands.  */
static GTY (()) struct opbuild_list_d build_v_may_defs;

/* Array for building all the vuse operands.  */
static GTY (()) struct opbuild_list_d build_vuses;

/* Array for building all the v_must_def operands.  */
static GTY (()) struct opbuild_list_d build_v_must_defs;
/* True if the operands for call clobbered vars are cached and valid.  */
bool ssa_call_clobbered_cache_valid;
bool ssa_ro_call_cache_valid;

/* These arrays are the cached operand vectors for call clobbered calls.  */
static VEC(tree,heap) *clobbered_v_may_defs;
static VEC(tree,heap) *clobbered_vuses;
static VEC(tree,heap) *ro_call_vuses;
static bool clobbered_aliased_loads;
static bool clobbered_aliased_stores;
static bool ro_call_aliased_loads;
static bool ops_active = false;

static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
static unsigned operand_memory_index;
static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
static void get_tmr_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
static void append_v_may_def (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree, tree);
static void add_call_read_ops (tree);
static void add_stmt_operand (tree *, stmt_ann_t, int);
static void build_ssa_operands (tree stmt);
static def_optype_p free_defs = NULL;
static use_optype_p free_uses = NULL;
static vuse_optype_p free_vuses = NULL;
static maydef_optype_p free_maydefs = NULL;
static mustdef_optype_p free_mustdefs = NULL;
/* Initialize a virtual operand build LIST called NAME with NUM elements.  */

static inline void
opbuild_initialize_virtual (struct opbuild_list_d *list, int num,
			    const char *name)
{
  list->first = OPBUILD_LAST;
  list->num = 0;
  VARRAY_TREE_INIT (list->vars, num, name);
  VARRAY_UINT_INIT (list->uid, num, "List UID");
  VARRAY_INT_INIT (list->next, num, "List NEXT");
}
/* Initialize a real operand build LIST called NAME with NUM elements.  */

static inline void
opbuild_initialize_real (struct opbuild_list_d *list, int num, const char *name)
{
  list->first = OPBUILD_LAST;
  list->num = 0;
  VARRAY_TREE_PTR_INIT (list->vars, num, name);
  VARRAY_INT_INIT (list->next, num, "List NEXT");
  /* The UID field is not needed since we sort based on the pointer value.  */
}
/* Free memory used in virtual operand build object LIST.  */

static inline void
opbuild_free (struct opbuild_list_d *list)
{
  list->vars = NULL;
  list->uid = NULL;
  list->next = NULL;
}

/* Number of elements in an opbuild list.  */

static inline unsigned
opbuild_num_elems (struct opbuild_list_d *list)
{
  return list->num;
}
/* Add VAR to the real operand list LIST, keeping it sorted and avoiding
   duplicates.  The actual sort value is the tree pointer value.  */

static inline void
opbuild_append_real (struct opbuild_list_d *list, tree *var)
{
  int index;

#ifdef ENABLE_CHECKING
  /* Ensure the real operand doesn't exist already.  */
  for (index = list->first;
       index != OPBUILD_LAST;
       index = VARRAY_INT (list->next, index))
    gcc_assert (VARRAY_TREE_PTR (list->vars, index) != var);
#endif

  /* First item in the list.  */
  index = VARRAY_ACTIVE_SIZE (list->vars);
  if (index == 0)
    list->first = index;
  else
    VARRAY_INT (list->next, index - 1) = index;
  VARRAY_PUSH_INT (list->next, OPBUILD_LAST);
  VARRAY_PUSH_TREE_PTR (list->vars, var);
  list->num++;
}
/* Add VAR to the virtual operand list LIST, keeping it sorted and avoiding
   duplicates.  The actual sort value is the DECL UID of the base variable.  */

static inline void
opbuild_append_virtual (struct opbuild_list_d *list, tree var)
{
  int index, curr, last;
  unsigned int var_uid;

  if (TREE_CODE (var) != SSA_NAME)
    var_uid = DECL_UID (var);
  else
    var_uid = DECL_UID (SSA_NAME_VAR (var));

  index = VARRAY_ACTIVE_SIZE (list->vars);

  if (index == 0)
    {
      VARRAY_PUSH_TREE (list->vars, var);
      VARRAY_PUSH_UINT (list->uid, var_uid);
      VARRAY_PUSH_INT (list->next, OPBUILD_LAST);
      list->num = 1;
      list->first = 0;
      return;
    }

  last = OPBUILD_LAST;
  /* Find the correct spot in the sorted list.  */
  for (curr = list->first;
       curr != OPBUILD_LAST;
       last = curr, curr = VARRAY_INT (list->next, curr))
    if (VARRAY_UINT (list->uid, curr) > var_uid)
      break;

  if (last == OPBUILD_LAST)
    {
      /* First item in the list.  */
      VARRAY_PUSH_INT (list->next, list->first);
      list->first = index;
    }
  else
    {
      /* Don't enter duplicates at all.  */
      if (VARRAY_UINT (list->uid, last) == var_uid)
	return;

      VARRAY_PUSH_INT (list->next, VARRAY_INT (list->next, last));
      VARRAY_INT (list->next, last) = index;
    }
  VARRAY_PUSH_TREE (list->vars, var);
  VARRAY_PUSH_UINT (list->uid, var_uid);
  list->num++;
}
/* Return the first element index in LIST.  OPBUILD_LAST means there are no
   more elements.  */

static inline int
opbuild_first (struct opbuild_list_d *list)
{
  if (list->num > 0)
    return list->first;
  else
    return OPBUILD_LAST;
}

/* Return the next element after PREV in LIST.  */

static inline int
opbuild_next (struct opbuild_list_d *list, int prev)
{
  return VARRAY_INT (list->next, prev);
}
/* Return the real element at index ELEM in LIST.  */

static inline tree *
opbuild_elem_real (struct opbuild_list_d *list, int elem)
{
  return VARRAY_TREE_PTR (list->vars, elem);
}

/* Return the virtual element at index ELEM in LIST.  */

static inline tree
opbuild_elem_virtual (struct opbuild_list_d *list, int elem)
{
  return VARRAY_TREE (list->vars, elem);
}

/* Return the virtual element uid at index ELEM in LIST.  */

static inline unsigned int
opbuild_elem_uid (struct opbuild_list_d *list, int elem)
{
  return VARRAY_UINT (list->uid, elem);
}
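
/* Illustrative sketch (an editorial addition): walking a build list with
   the accessors above.  This is the same loop shape the finalize and
   cache-filling code below uses; example_walk_virtuals is hypothetical.  */
#if 0
static void
example_walk_virtuals (struct opbuild_list_d *list)
{
  int i;

  /* Visit the virtual operands in sorted (DECL UID) order.  */
  for (i = opbuild_first (list);
       i != OPBUILD_LAST;
       i = opbuild_next (list, i))
    print_generic_expr (stderr, opbuild_elem_virtual (list, i), TDF_SLIM);
}
#endif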
/* Reset an operand build list.  */

static inline void
opbuild_clear (struct opbuild_list_d *list)
{
  list->first = OPBUILD_LAST;
  VARRAY_POP_ALL (list->vars);
  VARRAY_POP_ALL (list->next);
  if (list->uid)
    VARRAY_POP_ALL (list->uid);
  list->num = 0;
}
/* Remove ELEM from LIST where PREV is the previous element.  Return the next
   element.  */

static inline int
opbuild_remove_elem (struct opbuild_list_d *list, int elem, int prev)
{
  int ret;
  if (prev != OPBUILD_LAST)
    {
      gcc_assert (VARRAY_INT (list->next, prev) == elem);
      ret = VARRAY_INT (list->next, prev) = VARRAY_INT (list->next, elem);
    }
  else
    {
      gcc_assert (list->first == elem);
      ret = list->first = VARRAY_INT (list->next, elem);
    }
  list->num--;
  return ret;
}
/* Return true if the ssa operands cache is active.  */

bool
ssa_operands_active (void)
{
  return ops_active;
}
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  opbuild_initialize_real (&build_defs, 5, "build defs");
  opbuild_initialize_real (&build_uses, 10, "build uses");
  opbuild_initialize_virtual (&build_vuses, 25, "build_vuses");
  opbuild_initialize_virtual (&build_v_may_defs, 25, "build_v_may_defs");
  opbuild_initialize_virtual (&build_v_must_defs, 25, "build_v_must_defs");
  gcc_assert (operand_memory == NULL);
  operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
  ops_active = true;
}
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  opbuild_free (&build_defs);
  opbuild_free (&build_uses);
  opbuild_free (&build_v_must_defs);
  opbuild_free (&build_v_may_defs);
  opbuild_free (&build_vuses);
  free_defs = NULL;
  free_uses = NULL;
  free_vuses = NULL;
  free_maydefs = NULL;
  free_mustdefs = NULL;
  while ((ptr = operand_memory) != NULL)
    {
      operand_memory = operand_memory->next;
      ggc_free (ptr);
    }

  VEC_free (tree, heap, clobbered_v_may_defs);
  VEC_free (tree, heap, clobbered_vuses);
  VEC_free (tree, heap, ro_call_vuses);
  ops_active = false;
}
/* Return memory for operands of SIZE chunks.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;
  if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
    {
      struct ssa_operand_memory_d *ptr;
      ptr = ggc_alloc (sizeof (struct ssa_operand_memory_d));
      ptr->next = operand_memory;
      operand_memory = ptr;
      operand_memory_index = 0;
    }
  ptr = &(operand_memory->mem[operand_memory_index]);
  operand_memory_index += size;
  return ptr;
}
/* Make sure PTR is in the correct immediate use list.  Since uses are simply
   pointers into the stmt TREE, there is no way of telling if anyone has
   changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
   The contents are different, but the pointer is still the same.  This
   routine will check to make sure PTR is in the correct list, and if it isn't
   put it in the correct list.  We cannot simply check the previous node
   because all nodes in the same stmt might have been changed.  */

static inline void
correct_use_link (use_operand_p ptr, tree stmt)
{
  use_operand_p prev;
  tree root;

  /* Fold_stmt () may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  prev = ptr->prev;
  if (prev)
    {
      bool stmt_mod = true;
      /* Find the first element which isn't a SAFE iterator, is in a different
	 stmt, and is not a modified stmt.  That node is in the correct list,
	 see if we are too.  */
      while (stmt_mod)
	{
	  while (prev->stmt == stmt || prev->stmt == NULL)
	    prev = prev->prev;
	  if (prev->use == NULL)
	    stmt_mod = false;
	  else if ((stmt_mod = stmt_modified_p (prev->stmt)))
	    prev = prev->prev;
	}

      /* Get the ssa_name of the list the node is in.  */
      if (prev->use == NULL)
	root = prev->stmt;
      else
	root = *(prev->use);
      /* If it's the right list, simply return.  */
      if (root == *(ptr->use))
	return;
    }
  /* It's in the wrong list if we reach here.  */
  delink_imm_use (ptr);
  link_imm_use (ptr, *(ptr->use));
}
#define FINALIZE_OPBUILD build_defs
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_real (&build_defs, (I))
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_real (&build_defs, (I))
#define FINALIZE_FUNC		finalize_ssa_def_ops
#define FINALIZE_ALLOC		alloc_def
#define FINALIZE_FREE		free_defs
#define FINALIZE_TYPE		struct def_optype_d
#define FINALIZE_ELEM(PTR)	((PTR)->def_ptr)
#define FINALIZE_OPS		DEF_OPS
#define FINALIZE_BASE(VAR)	VAR
#define FINALIZE_BASE_TYPE	tree *
#define FINALIZE_BASE_ZERO	NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)	FINALIZE_ELEM (PTR) = (VAL)
#include "tree-ssa-opfinalize.h"
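
/* Aside (an editorial addition): the FINALIZE_* defines above are a
   "poor man's template".  tree-ssa-opfinalize.h expands them into a
   type-specific finalize routine and then #undefs them, so the pattern can
   be repeated once per operand type.  A minimal sketch of the same idiom,
   with hypothetical names:

       (maximize.h)
       TYPE MAX_FUNC (TYPE a, TYPE b) { return a > b ? a : b; }
       #undef TYPE
       #undef MAX_FUNC

       (user.c)
       #define TYPE int
       #define MAX_FUNC max_int
       #include "maximize.h"
       #define TYPE double
       #define MAX_FUNC max_double
       #include "maximize.h"  */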
/* This routine will create stmt operands for STMT from the def build list.  */

static void
finalize_ssa_defs (tree stmt)
{
  unsigned int num = opbuild_num_elems (&build_defs);
  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);

  /* If there is an old list, often the new list is identical, or close, so
     find the elements at the beginning that are the same as the vector.  */

  finalize_ssa_def_ops (stmt);
  opbuild_clear (&build_defs);
}
#define FINALIZE_OPBUILD build_uses
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_real (&build_uses, (I))
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_real (&build_uses, (I))
#define FINALIZE_FUNC		finalize_ssa_use_ops
#define FINALIZE_ALLOC		alloc_use
#define FINALIZE_FREE		free_uses
#define FINALIZE_TYPE		struct use_optype_d
#define FINALIZE_ELEM(PTR)	((PTR)->use_ptr.use)
#define FINALIZE_OPS		USE_OPS
#define FINALIZE_USE_PTR(PTR)	USE_OP_PTR (PTR)
#define FINALIZE_BASE(VAR)	VAR
#define FINALIZE_BASE_TYPE	tree *
#define FINALIZE_BASE_ZERO	NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)	\
	(PTR)->use_ptr.use = (VAL);		\
	link_imm_use_stmt (&((PTR)->use_ptr),	\
			   *(VAL), (STMT))
#include "tree-ssa-opfinalize.h"
/* Return a new use operand vector for STMT, comparing to OLD_OPS_P.  */

static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = opbuild_num_elems (&build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to get_stmt_operands does not pass a pointer to a
       local variable).  */
    for (x = 0; x < num; x++)
      gcc_assert (*(opbuild_elem_real (&build_uses, x)) != stmt);
  }
#endif
  finalize_ssa_use_ops (stmt);
  opbuild_clear (&build_uses);
}
/* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P.  */
#define FINALIZE_OPBUILD build_v_may_defs
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_virtual (&build_v_may_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_uid (&build_v_may_defs, (I))
#define FINALIZE_FUNC		finalize_ssa_v_may_def_ops
#define FINALIZE_ALLOC		alloc_maydef
#define FINALIZE_FREE		free_maydefs
#define FINALIZE_TYPE		struct maydef_optype_d
#define FINALIZE_ELEM(PTR)	MAYDEF_RESULT (PTR)
#define FINALIZE_OPS		MAYDEF_OPS
#define FINALIZE_USE_PTR(PTR)	MAYDEF_OP_PTR (PTR)
#define FINALIZE_BASE_ZERO	0
#define FINALIZE_BASE(VAR)	((TREE_CODE (VAR) == SSA_NAME)		\
				? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR)))
#define FINALIZE_BASE_TYPE	unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)	\
	(PTR)->def_var = (VAL);			\
	(PTR)->use_var = (VAL);			\
	(PTR)->use_ptr.use = &((PTR)->use_var);	\
	link_imm_use_stmt (&((PTR)->use_ptr),	\
			   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"

static void
finalize_ssa_v_may_defs (tree stmt)
{
  finalize_ssa_v_may_def_ops (stmt);
}
/* Clear the in_list bits and empty the build array for v_may_defs.  */

static void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = opbuild_num_elems (&build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = opbuild_elem_virtual (&build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }
  opbuild_clear (&build_v_may_defs);
}
#define FINALIZE_OPBUILD build_vuses
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_virtual (&build_vuses, (I))
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_uid (&build_vuses, (I))
#define FINALIZE_FUNC		finalize_ssa_vuse_ops
#define FINALIZE_ALLOC		alloc_vuse
#define FINALIZE_FREE		free_vuses
#define FINALIZE_TYPE		struct vuse_optype_d
#define FINALIZE_ELEM(PTR)	VUSE_OP (PTR)
#define FINALIZE_OPS		VUSE_OPS
#define FINALIZE_USE_PTR(PTR)	VUSE_OP_PTR (PTR)
#define FINALIZE_BASE_ZERO	0
#define FINALIZE_BASE(VAR)	((TREE_CODE (VAR) == SSA_NAME)		\
				? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR)))
#define FINALIZE_BASE_TYPE	unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)	\
	(PTR)->use_var = (VAL);			\
	(PTR)->use_ptr.use = &((PTR)->use_var);	\
	link_imm_use_stmt (&((PTR)->use_ptr),	\
			   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"
/* Return a new vuse operand vector, comparing to OLD_OPS_P.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_v_may_defs;
  int vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  num = opbuild_num_elems (&build_vuses);
  num_v_may_defs = opbuild_num_elems (&build_v_may_defs);

  if (num > 0 && num_v_may_defs > 0)
    {
      int last = OPBUILD_LAST;
      vuse_index = opbuild_first (&build_vuses);
      for ( ; vuse_index != OPBUILD_LAST; )
	{
	  tree vuse;
	  vuse = opbuild_elem_virtual (&build_vuses, vuse_index);
	  if (TREE_CODE (vuse) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (vuse);
	      ann->in_vuse_list = 0;
	      if (ann->in_v_may_def_list)
		{
		  vuse_index = opbuild_remove_elem (&build_vuses, vuse_index,
						    last);
		  continue;
		}
	    }
	  last = vuse_index;
	  vuse_index = opbuild_next (&build_vuses, vuse_index);
	}
    }
  else
    /* Clear out the in_list bits.  */
    for (vuse_index = opbuild_first (&build_vuses);
	 vuse_index != OPBUILD_LAST;
	 vuse_index = opbuild_next (&build_vuses, vuse_index))
      {
	tree t = opbuild_elem_virtual (&build_vuses, vuse_index);
	if (TREE_CODE (t) != SSA_NAME)
	  {
	    var_ann_t ann = var_ann (t);
	    ann->in_vuse_list = 0;
	  }
      }

  finalize_ssa_vuse_ops (stmt);
  /* The v_may_def build vector wasn't cleaned up because we needed it.  */
  cleanup_v_may_defs ();

  /* Free the vuses build vector.  */
  opbuild_clear (&build_vuses);
}
/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P.  */

#define FINALIZE_OPBUILD build_v_must_defs
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_virtual (&build_v_must_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_uid (&build_v_must_defs, (I))
#define FINALIZE_FUNC		finalize_ssa_v_must_def_ops
#define FINALIZE_ALLOC		alloc_mustdef
#define FINALIZE_FREE		free_mustdefs
#define FINALIZE_TYPE		struct mustdef_optype_d
#define FINALIZE_ELEM(PTR)	MUSTDEF_RESULT (PTR)
#define FINALIZE_OPS		MUSTDEF_OPS
#define FINALIZE_USE_PTR(PTR)	MUSTDEF_KILL_PTR (PTR)
#define FINALIZE_BASE_ZERO	0
#define FINALIZE_BASE(VAR)	((TREE_CODE (VAR) == SSA_NAME)		\
				? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR)))
#define FINALIZE_BASE_TYPE	unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)	\
	(PTR)->def_var = (VAL);			\
	(PTR)->kill_var = (VAL);		\
	(PTR)->use_ptr.use = &((PTR)->kill_var);\
	link_imm_use_stmt (&((PTR)->use_ptr),	\
			   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"

static void
finalize_ssa_v_must_defs (tree stmt)
{
  /* In the presence of subvars, there may be more than one V_MUST_DEF per
     statement (one for each subvar).  It is a bit expensive to verify that
     all must-defs in a statement belong to subvars if there is more than one
     MUST-def, so we don't do it.  Suffice to say, if you reach here without
     having subvars, and have num > 1, you have hit a bug.  */

  finalize_ssa_v_must_def_ops (stmt);
  opbuild_clear (&build_v_must_defs);
}
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);
}
/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (opbuild_num_elems (&build_defs) == 0);
  gcc_assert (opbuild_num_elems (&build_uses) == 0);
  gcc_assert (opbuild_num_elems (&build_vuses) == 0);
  gcc_assert (opbuild_num_elems (&build_v_may_defs) == 0);
  gcc_assert (opbuild_num_elems (&build_v_must_defs) == 0);
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  opbuild_append_real (&build_defs, def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  opbuild_append_real (&build_uses, use_p);
}
/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      /* Don't allow duplicate entries.  */
      if (ann->in_v_may_def_list)
	return;
      ann->in_v_may_def_list = 1;
    }

  opbuild_append_virtual (&build_v_may_defs, var);
}
/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
	return;
      ann->in_vuse_list = 1;
    }

  opbuild_append_virtual (&build_vuses, var);
}
/* Add VAR to the list of virtual must definitions for INFO.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < opbuild_num_elems (&build_v_must_defs); i++)
    if (var == opbuild_elem_virtual (&build_v_must_defs, i))
      return;

  opbuild_append_virtual (&build_v_must_defs, var);
}
/* Parse STMT looking for operands.  OLD_OPS is the original stmt operand
   cache for STMT, if it existed before.  When finished, the various build_*
   operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      /* First get operands from the RHS.  For the LHS, we use a V_MAY_DEF if
	 either only part of LHS is modified or if the RHS might throw,
	 otherwise, use V_MUST_DEF.

	 ??? If it might throw, we should represent somehow that it is killed
	 on the fallthrough path.  */
      {
	tree lhs = TREE_OPERAND (stmt, 0);
	int lhs_flags = opf_is_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);

	/* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
	   or not the entire LHS is modified; that depends on what's
	   inside the VIEW_CONVERT_EXPR.  */
	if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
	  lhs = TREE_OPERAND (lhs, 0);

	if (TREE_CODE (lhs) != ARRAY_REF
	    && TREE_CODE (lhs) != ARRAY_RANGE_REF
	    && TREE_CODE (lhs) != BIT_FIELD_REF
	    && TREE_CODE (lhs) != REALPART_EXPR
	    && TREE_CODE (lhs) != IMAGPART_EXPR)
	  lhs_flags |= opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
      }
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

      /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
	 pointer (which may only happen for USE operands), we will fail in
	 append_use.  This default will handle statements like empty
	 statements, or CALL_EXPRs that may appear on the RHS of a statement
	 or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }
}
/* Create an operands cache for STMT, returning it in NEW_OPS.  OLD_OPS are
   the original operands, and if ANN is non-null, appropriate stmt flags are
   set in the stmt's annotation.  If ANN is NULL, this is not considered a
   "real" stmt, and none of the operands will be entered into their
   respective immediate uses tables.  This is to allow stmts to be processed
   when they are not actually in the CFG.

   Note that some fields in old_ops may change to NULL, although none of the
   memory they originally pointed to will be destroyed.  It is appropriate
   to call free_stmt_operands() on the value returned in old_ops.

   The rationale for this: Certain optimizations wish to examine the
   difference between new_ops and old_ops after processing.  If a set of
   operands don't change, new_ops will simply assume the pointer in old_ops,
   and the old_ops pointer will be set to NULL, indicating no memory needs
   to be cleared.  Usage might appear something like:

       old_ops_copy = old_ops = stmt_ann(stmt)->operands;
       build_ssa_operands (stmt, NULL, &old_ops, &new_ops);
          <* compare old_ops_copy and new_ops *>
       free_ssa_operands (old_ops);				*/

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands, nor
     makes aliased loads or stores.  */
  if (ann)
    {
      ann->has_volatile_ops = false;
      ann->makes_aliased_stores = false;
      ann->makes_aliased_loads = false;
    }

  start_ssa_stmt_operands ();

  parse_ssa_operands (stmt);

  finalize_ssa_stmt_operands (stmt);
}
/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->maydef_ops = NULL;
  ops->mustdef_ops = NULL;
  ops->vuse_ops = NULL;
}
/* Get the operands of statement STMT.  Note that repeated calls to
   get_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to mark_stmt_modified().  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);
  /* If get_stmt_operands is called before SSA is initialized, don't
     do anything.  */
  if (!ssa_operands_active ())
    return;
  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  gcc_assert (ann->modified);

  timevar_push (TV_TREE_OPS);

  build_ssa_operands (stmt);

  /* Clear the modified bit for STMT.  Subsequent calls to
     get_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to mark_stmt_modified().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
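
/* Usage sketch (an editorial addition): a pass that rewrites a statement
   in place re-validates the cache roughly like this.  The helper name
   example_replace_rhs is hypothetical.  */
#if 0
static void
example_replace_rhs (tree stmt, tree new_rhs)
{
  TREE_OPERAND (stmt, 1) = new_rhs;   /* Mutate the statement tree...  */
  mark_stmt_modified (stmt);          /* ...flag the cache as stale...  */
  update_stmt_operands (stmt);        /* ...and rebuild it immediately.  */
}
#endif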
/* Copies virtual operands from SRC to DST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  tree t;
  ssa_op_iter iter, old_iter;
  use_operand_p use_p, u2;
  def_operand_p def_p, d2;

  build_ssa_operands (dest);

  /* Copy all the virtual fields.  */
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
    append_vuse (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
    append_v_may_def (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
    append_v_must_def (t);

  if (opbuild_num_elems (&build_vuses) == 0
      && opbuild_num_elems (&build_v_may_defs) == 0
      && opbuild_num_elems (&build_v_must_defs) == 0)
    return;

  /* Now commit the virtual operands to this stmt.  */
  finalize_ssa_v_must_defs (dest);
  finalize_ssa_v_may_defs (dest);
  finalize_ssa_vuses (dest);

  /* Finally, set the fields to the same values as the originals.  */

  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, t);
      t = op_iter_next_tree (&old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_maydef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));
}
/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store, this can
   be used to eliminate redundant loads.  OLD_OPS are the operands from the
   store stmt, and NEW_STMT is the new load which represents a load of the
   values stored.  */

void
create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
{
  stmt_ann_t ann;
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned x;

  ann = get_stmt_ann (new_stmt);

  /* Process the stmt looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (x = 0; x < opbuild_num_elems (&build_vuses); x++)
    {
      tree t = opbuild_elem_virtual (&build_vuses, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_vuse_list = 0;
	}
    }

  for (x = 0; x < opbuild_num_elems (&build_v_may_defs); x++)
    {
      tree t = opbuild_elem_virtual (&build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }

  /* Remove any virtual operands that were found.  */
  opbuild_clear (&build_v_may_defs);
  opbuild_clear (&build_v_must_defs);
  opbuild_clear (&build_vuses);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
			     (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
    append_vuse (op);

  /* Now build the operands for this new stmt.  */
  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}
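
/* Hedged example (an editorial addition): given a store '*p = x_3' whose
   cache carries 'a_5 = V_MAY_DEF <a_4>', the fake load built here carries
   a VUSE of a_5.  A later load of '*p' that sees the same virtual version
   can then be recognized as redundant and replaced with x_3.  */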
/* Swap operands EXP0 and EXP1 in statement STMT.  */

void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative positions
     of these two operands in their respective immediate use lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* If both uses don't have operand entries, there isn't much we can do
	 at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
	{
	  tree *tmp = USE_OP_PTR (use1)->use;
	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
	  USE_OP_PTR (use0)->use = tmp;
	}
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
/* Recursively scan the expression pointed to by EXPR_P in statement referred
   to by INFO.  FLAGS is one of the OPF_* constants modifying how to interpret
   the operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* We could have the address of a component, array member,
	 etc which has interesting variable references.  */
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the stmt takes its address will be
	 of interest to some passes (e.g. alias resolution).  */
      add_stmt_operand (expr_p, s_ann, 0);

      /* If the address is invariant, there may be no interesting variable
	 references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* There should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find here
	 are ARRAY_REF indices which will always be real operands (GIMPLE
	 does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case CONST_DECL:
      {
	subvar_t svars;

	/* Add the subvars for a variable if it has subvars, to DEFS or USES.
	   Otherwise, add the variable itself.
	   Whether it goes to USES or DEFS depends on the operand flags.  */
	if (var_can_have_subvars (expr)
	    && (svars = get_subvars_for_var (expr)))
	  {
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      add_stmt_operand (&sv->var, s_ann, flags);
	  }
	else
	  add_stmt_operand (expr_p, s_ann, flags);

	return;
      }
    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
	 representing the array.  The virtual variable for an ARRAY_REF
	 is the VAR_DECL for the array.  */

      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
	 according to the value of IS_DEF.  Recurse if the LHS of the
	 ARRAY_REF node is not a regular variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, s_ann, flags);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
      return;
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree ref;
	unsigned HOST_WIDE_INT offset, size;
	/* This component ref becomes an access to all of the subvariables
	   it can touch, if we can determine that, but *NOT* the real one.
	   If we can't determine which fields we could touch, the recursion
	   will eventually get to a variable and add *all* of its subvars, or
	   whatever is the minimum correct subset.  */

	ref = okay_component_ref_for_subvars (expr, &offset, &size);
	if (ref)
	  {
	    subvar_t svars = get_subvars_for_var (ref);
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      {
		bool exact;
		if (overlap_subvar (offset, size, sv, &exact))
		  {
		    int subvar_flags = flags;
		    if (!exact)
		      subvar_flags &= ~opf_kill_def;
		    add_stmt_operand (&sv->var, s_ann, subvar_flags);
		  }
	      }
	  }
	else
	  get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			     flags & ~opf_kill_def);

	if (code == COMPONENT_REF)
	  {
	    if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      s_ann->has_volatile_ops = true;
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	  }
	return;
      }
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;
    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case MODIFY_EXPR:
      {
	int subflags;
	tree op;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

	op = TREE_OPERAND (expr, 0);
	if (TREE_CODE (op) == WITH_SIZE_EXPR)
	  op = TREE_OPERAND (op, 0);
	if (TREE_CODE (op) == ARRAY_REF
	    || TREE_CODE (op) == ARRAY_RANGE_REF
	    || TREE_CODE (op) == REALPART_EXPR
	    || TREE_CODE (op) == IMAGPART_EXPR)
	  subflags = opf_is_def;
	else
	  subflags = opf_is_def | opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
	return;
      }
    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	for (idx = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, opf_none);

	return;
      }

    case TRUTH_NOT_EXPR:
    case BIT_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	tree op0 = TREE_OPERAND (expr, 0);
	tree op1 = TREE_OPERAND (expr, 1);

	/* If it would be profitable to swap the operands, then do so to
	   canonicalize the statement, enabling better optimization.

	   By placing canonicalization of such expressions here we
	   transparently keep statements in canonical form, even
	   when the statement is modified.  */
	if (tree_swap_operands_p (op0, op1, false))
	  {
	    /* For relationals we need to swap the operands
	       and change the code.  */
	    if (code == LT_EXPR
		|| code == GT_EXPR
		|| code == LE_EXPR
		|| code == GE_EXPR)
	      {
		TREE_SET_CODE (expr, swap_tree_comparison (code));
		swap_tree_operands (stmt,
				    &TREE_OPERAND (expr, 0),
				    &TREE_OPERAND (expr, 1));
	      }
	    /* For a commutative operator we can just swap the operands.  */
	    else if (commutative_tree_code (code))
	      swap_tree_operands (stmt,
				  &TREE_OPERAND (expr, 0),
				  &TREE_OPERAND (expr, 1));
	  }

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }
    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
	goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
	goto do_binary;
      if (class == tcc_constant || class == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  internal_error ("internal error");
#endif
  gcc_unreachable ();
}
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
			       &allows_mem, &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }


  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, s_ann, opf_is_def);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	    }

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
	  {
	    tree var = referenced_var (i);

	    /* Subvars are explicitly represented in this list, so
	       we don't need the original to be added to the clobber
	       ops, but the original *will* be in this list because
	       we keep the addressability of the original
	       variable up-to-date so we don't screw up the rest of
	       the backend.  */
	    if (var_can_have_subvars (var)
		&& get_subvars_for_var (var) != NULL)
	      continue;

	    add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	  }

	break;
      }
}
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its type memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);
	  if (v_ann->type_mem_tag)
	    add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
	}
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }

  /* Everything else *should* have been folded elsewhere, but users
     are smarter than we in finding ways to write invalid code.  We
     cannot just assert here.  If we were absolutely certain that we
     do handle all valid cases, then we could just do nothing here.
     That seems optimistic, so attempt to do something logical... */
  else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
	   && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
    {
      /* Make sure we know the object is addressable.  */
      pptr = &TREE_OPERAND (ptr, 0);
      add_stmt_operand (pptr, s_ann, 0);

      /* Mark the object itself with a VUSE.  */
      pptr = &TREE_OPERAND (*pptr, 0);
      get_expr_operands (stmt, pptr, flags);
      return;
    }

  /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
  else
    gcc_unreachable ();

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none);
}
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag = TMR_TAG (expr);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);

  /* MEM_REFs should never be killing.  */
  flags &= ~opf_kill_def;

  if (TMR_SYMBOL (expr))
    {
      stmt_ann_t ann = stmt_ann (stmt);
      add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
    }

  if (tag)
    get_expr_operands (stmt, &tag, flags);
  else
    /* Something weird, so ensure that we will be careful.  */
    stmt_ann (stmt)->has_volatile_ops = true;
}
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or V_USE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
  if (aliases_computed_p
      && !bitmap_empty_p (call_clobbered_vars)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt);
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}
/* Add *VAR_P to the appropriate operand array for INFO.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  bool is_real_op;
  tree var, sym;
  var_ann_t v_ann;

  var = *var_p;

  /* If the operand is an ADDR_EXPR, add its operand to the list of
     variables that have had their address taken in this statement.  */
  if (TREE_CODE (var) == ADDR_EXPR && s_ann)
    {
      add_to_addressable_set (TREE_OPERAND (var, 0), &s_ann->addresses_taken);
      return;
    }

  /* If the original variable is not a scalar, it will be added to the list
     of virtual operands.  In that case, use its base symbol as the virtual
     variable representing it.  */
  is_real_op = is_gimple_reg (var);
  if (!is_real_op && !DECL_P (var))
    var = get_virtual_var (var);

  /* If VAR is not a variable that we care to optimize, do nothing.  */
  if (var == NULL_TREE || !SSA_VAR_P (var))
    return;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a V_MAY_DEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     check that this only happens on non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     modify_expr, then we can't suppress the V_DEF, lest we run into
     validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_non_specific) && unmodifiable_var_p (var))
    {
      gcc_assert (!is_real_op);
      flags &= ~(opf_is_def | opf_kill_def);
    }

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    {
      varray_type aliases;

      /* The variable is not a GIMPLE register.  Add it (or its aliases) to
	 virtual operands, unless the caller has specifically requested
	 not to add virtual operands (used when adding operands inside an
	 ADDR_EXPR expression).  */
      if (flags & opf_no_vops)
	return;

      aliases = v_ann->may_aliases;

      if (aliases == NULL)
	{
	  /* The variable is not aliased or it is an alias tag.  */
	  if (flags & opf_is_def)
	    {
	      if (flags & opf_kill_def)
		{
		  /* Only regular variables or struct fields may get a
		     V_MUST_DEF operand.  */
		  gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG
			      || v_ann->mem_tag_kind == STRUCT_FIELD);
		  /* V_MUST_DEF for non-aliased, non-GIMPLE register
		     variable definitions.  */
		  append_v_must_def (var);
		}
	      else
		{
		  /* Add a V_MAY_DEF for call-clobbered variables and
		     memory tags.  */
		  append_v_may_def (var);
		}

	      if (s_ann && v_ann->is_alias_tag)
		s_ann->makes_aliased_stores = 1;
	    }
	  else
	    {
	      append_vuse (var);
	      if (s_ann && v_ann->is_alias_tag)
		s_ann->makes_aliased_loads = 1;
	    }
	}
      else
	{
	  unsigned i;

	  /* The variable is aliased.  Add its aliases to the virtual
	     operands.  */
	  gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);

	  if (flags & opf_is_def)
	    {
	      /* If the variable is also an alias tag, add a virtual
		 operand for it, otherwise we will miss representing
		 references to the members of the variable's alias set.
		 This fixes the bug in gcc.c-torture/execute/20020503-1.c.  */
	      if (v_ann->is_alias_tag)
		append_v_may_def (var);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_v_may_def (VARRAY_TREE (aliases, i));

	      if (s_ann)
		s_ann->makes_aliased_stores = 1;
	    }
	  else
	    {
	      /* Similarly, append a virtual use for VAR itself, when
		 it is an alias tag.  */
	      if (v_ann->is_alias_tag)
		append_vuse (var);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_vuse (VARRAY_TREE (aliases, i));

	      if (s_ann)
		s_ann->makes_aliased_loads = 1;
	    }
	}
    }
}
/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
   a single variable whose address has been taken or any other valid
   GIMPLE memory reference (structure reference, array, etc).  If the
   base address of REF is a decl that has sub-variables, also add all
   of its sub-variables.  */

void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
  tree var;
  subvar_t svars;

  gcc_assert (addresses_taken);

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*addresses_taken == NULL)
	*addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
	  && (svars = get_subvars_for_var (var)))
	{
	  subvar_t sv;
	  for (sv = svars; sv; sv = sv->next)
	    {
	      bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
	      TREE_ADDRESSABLE (sv->var) = 1;
	    }
	}
      else
	{
	  bitmap_set_bit (*addresses_taken, DECL_UID (var));
	  TREE_ADDRESSABLE (var) = 1;
	}
    }
}
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  int i;
  unsigned u;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;
  bitmap not_read_b, not_written_b;

  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_is_def);
      return;
    }

  /* FIXME - if we have better information from the static vars
     analysis, we need to make the cache call site specific.  This way
     we can have the performance benefits even if we are doing good
     optimization.  */

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;

  /* If cache is valid, copy the elements into the build vectors.  */
  if (ssa_call_clobbered_cache_valid
      && (!not_read_b || bitmap_empty_p (not_read_b))
      && (!not_written_b || bitmap_empty_p (not_written_b)))
    {
      /* Process the caches in reverse order so we are always inserting at
	 the head of the list.  */
      for (i = VEC_length (tree, clobbered_vuses) - 1; i >= 0; i--)
	{
	  t = VEC_index (tree, clobbered_vuses, i);
	  gcc_assert (TREE_CODE (t) != SSA_NAME);
	  var_ann (t)->in_vuse_list = 1;
	  opbuild_append_virtual (&build_vuses, t);
	}
      for (i = VEC_length (tree, clobbered_v_may_defs) - 1; i >= 0; i--)
	{
	  t = VEC_index (tree, clobbered_v_may_defs, i);
	  gcc_assert (TREE_CODE (t) != SSA_NAME);
	  var_ann (t)->in_v_may_def_list = 1;
	  opbuild_append_virtual (&build_v_may_defs, t);
	}
      if (s_ann)
	{
	  s_ann->makes_aliased_loads = clobbered_aliased_loads;
	  s_ann->makes_aliased_stores = clobbered_aliased_stores;
	}
      return;
    }

  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a V_MAY_DEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      if (unmodifiable_var_p (var))
	add_stmt_operand (&var, &empty_ann, opf_none);
      else
	{
	  bool not_read
	    = not_read_b ? bitmap_bit_p (not_read_b, u) : false;
	  bool not_written
	    = not_written_b ? bitmap_bit_p (not_written_b, u) : false;

	  if ((TREE_READONLY (var)
	       && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
	      || not_written)
	    {
	      if (!not_read)
		add_stmt_operand (&var, &empty_ann, opf_none);
	    }
	  else
	    add_stmt_operand (&var, &empty_ann, opf_is_def);
	}
    }

  if ((!not_read_b || bitmap_empty_p (not_read_b))
      && (!not_written_b || bitmap_empty_p (not_written_b)))
    {
      clobbered_aliased_loads = empty_ann.makes_aliased_loads;
      clobbered_aliased_stores = empty_ann.makes_aliased_stores;

      /* Set the flags for a stmt's annotation.  */
      if (s_ann)
	{
	  s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
	  s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
	}

      /* Prepare empty cache vectors.  */
      VEC_truncate (tree, clobbered_vuses, 0);
      VEC_truncate (tree, clobbered_v_may_defs, 0);

      /* Now fill the clobbered cache with the values that have been found.  */
      for (i = opbuild_first (&build_vuses);
	   i != OPBUILD_LAST;
	   i = opbuild_next (&build_vuses, i))
	VEC_safe_push (tree, heap, clobbered_vuses,
		       opbuild_elem_virtual (&build_vuses, i));

      gcc_assert (opbuild_num_elems (&build_vuses)
		  == VEC_length (tree, clobbered_vuses));

      for (i = opbuild_first (&build_v_may_defs);
	   i != OPBUILD_LAST;
	   i = opbuild_next (&build_v_may_defs, i))
	VEC_safe_push (tree, heap, clobbered_v_may_defs,
		       opbuild_elem_virtual (&build_v_may_defs, i));

      gcc_assert (opbuild_num_elems (&build_v_may_defs)
		  == VEC_length (tree, clobbered_v_may_defs));

      ssa_call_clobbered_cache_valid = true;
    }
}
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt)
{
  int i;
  unsigned u;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_none);
      return;
    }

  /* If cache is valid, copy the elements into the build vector.  */
  if (ssa_ro_call_cache_valid)
    {
      for (i = VEC_length (tree, ro_call_vuses) - 1; i >= 0; i--)
	{
	  /* Process the caches in reverse order so we are always inserting at
	     the head of the list.  */
	  t = VEC_index (tree, ro_call_vuses, i);
	  gcc_assert (TREE_CODE (t) != SSA_NAME);
	  var_ann (t)->in_vuse_list = 1;
	  opbuild_append_virtual (&build_vuses, t);
	}
      if (s_ann)
	s_ann->makes_aliased_loads = ro_call_aliased_loads;
      return;
    }

  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
    }

  ro_call_aliased_loads = empty_ann.makes_aliased_loads;
  if (s_ann)
    s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;

  /* Prepare empty cache vectors.  */
  VEC_truncate (tree, ro_call_vuses, 0);

  /* Now fill the clobbered cache with the values that have been found.  */
  for (i = opbuild_first (&build_vuses);
       i != OPBUILD_LAST;
       i = opbuild_next (&build_vuses, i))
    VEC_safe_push (tree, heap, ro_call_vuses,
		   opbuild_elem_virtual (&build_vuses, i));

  gcc_assert (opbuild_num_elems (&build_vuses)
	      == VEC_length (tree, ro_call_vuses));

  ssa_ro_call_cache_valid = true;
}
/* Scan the immediate_use list for VAR, making sure it is linked properly.
   Return TRUE if there is a problem.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;
      /* Avoid infinite loops.  */
      if (count++ > 30000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
      else
	print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }
  fprintf (file, "\n");
}
/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
	continue;
      dump_immediate_uses_for (file, var);
    }
}
/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

#include "gt-tree-ssa-operands.h"