/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
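
/* Editor's illustration (not part of the original sources): a pass
   typically consumes this cache through the iterator macros declared
   in tree-ssa-operands.h.  A minimal sketch, assuming only that API
   and a hypothetical process_use callback:

       tree use;
       ssa_op_iter iter;

       FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	 process_use (use);

   Because real operands are pointers into STMT's tree, rewriting one
   through its use_operand_p updates the statement itself; virtual
   operands exist only in this cache.  */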
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def	(1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 2)

/* Operand is a "non-specific" kill for call-clobbers and such.  This
   is used to distinguish "reset the world" events from explicit
   MODIFY_EXPRs.  */
#define opf_non_specific  (1 << 3)
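
/* Editor's illustration (not part of the original sources): the
   scanning routines below combine these flags.  For instance, the LHS
   of an assignment is scanned with

       get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			  opf_is_def | opf_kill_def);

   as done in get_modify_expr_operands, while get_indirect_ref_operands
   strips opf_kill_def again, since a store through a pointer only
   may-defines what the pointer points to.  */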
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Array for building all the V_MAY_DEF operands.  */
static VEC(tree,heap) *build_v_may_defs;

/* Array for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;

/* Array for building all the V_MUST_DEF operands.  */
static VEC(tree,heap) *build_v_must_defs;
/* Flag indicating whether the SSA operand cache has been initialized
   and is in use.  */
static bool ops_active = false;

static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
static unsigned operand_memory_index;

static void get_expr_operands (tree, tree *, int);

static def_optype_p free_defs = NULL;
static use_optype_p free_uses = NULL;
static vuse_optype_p free_vuses = NULL;
static maydef_optype_p free_maydefs = NULL;
static mustdef_optype_p free_mustdefs = NULL;
/* Allocates operand OP of given TYPE from the appropriate free list,
   or from newly allocated memory if the list is empty.  */

#define ALLOC_OPTYPE(OP, TYPE)				\
  do							\
    {							\
      TYPE##_optype_p ret = free_##TYPE##s;		\
      if (ret)						\
	free_##TYPE##s = ret->next;			\
      else						\
	ret = ssa_operand_alloc (sizeof (*ret));	\
      (OP) = ret;					\
    } while (0)
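
/* Editor's illustration (not part of the original sources): a typical
   expansion site, as used by the add_*_op routines below:

       def_optype_p new;
       ALLOC_OPTYPE (new, def);	    (draws from the free_defs list)

   Nodes recycled by the finalize routines land back on the free
   lists, so ssa_operand_alloc is only reached when a list runs dry.  */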
/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}
/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *)p);
  tree e2 = *((const tree *)q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}
/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);

  if (num < 2)
    return;

  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
	  > get_name_decl (VEC_index (tree, list, 1)))
	{
	  /* Swap elements if in the wrong order.  */
	  tree tmp = VEC_index (tree, list, 0);
	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
	  VEC_replace (tree, list, 1, tmp);
	}
      return;
    }

  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
	 VEC_length (tree, list),
	 sizeof (tree),
	 operand_build_cmp);
}
/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  return ops_active;
}
/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobber_ops.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (V_MAY_DEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_ops.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  build_defs = VEC_alloc (tree, heap, 5);
  build_uses = VEC_alloc (tree, heap, 10);
  build_vuses = VEC_alloc (tree, heap, 25);
  build_v_may_defs = VEC_alloc (tree, heap, 25);
  build_v_must_defs = VEC_alloc (tree, heap, 25);

  gcc_assert (operand_memory == NULL);
  operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
  ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
}
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  VEC_free (tree, heap, build_defs);
  VEC_free (tree, heap, build_uses);
  VEC_free (tree, heap, build_v_must_defs);
  VEC_free (tree, heap, build_v_may_defs);
  VEC_free (tree, heap, build_vuses);
  free_defs = NULL;
  free_uses = NULL;
  free_vuses = NULL;
  free_maydefs = NULL;
  free_mustdefs = NULL;
  while ((ptr = operand_memory) != NULL)
    {
      operand_memory = operand_memory->next;
      ggc_free (ptr);
    }

  ops_active = false;

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars:%d\n",
	       clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided:%d\n",
	       clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided:%d\n",
	       clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
	       clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers:%d\n",
	       clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
	       clobber_stats.static_readonly_clobbers_avoided);
    }
}
/* Return memory for operands of SIZE chunks.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;
  if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
    {
      struct ssa_operand_memory_d *ptr;
      ptr = GGC_NEW (struct ssa_operand_memory_d);
      ptr->next = operand_memory;
      operand_memory = ptr;
      operand_memory_index = 0;
    }
  ptr = &(operand_memory->mem[operand_memory_index]);
  operand_memory_index += size;
  return ptr;
}
/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* fold_stmt may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}
/* Appends ELT after TO, and moves the TO pointer to ELT.  */

#define APPEND_OP_AFTER(ELT, TO)	\
  do					\
    {					\
      (TO)->next = (ELT);		\
      (TO) = (ELT);			\
    } while (0)


/* Appends head of list FROM after TO, and moves both pointers
   to their successors.  */

#define MOVE_HEAD_AFTER(FROM, TO)	\
  do					\
    {					\
      APPEND_OP_AFTER (FROM, TO);	\
      (FROM) = (FROM)->next;		\
    } while (0)


/* Moves OP to appropriate freelist.  OP is set to its successor.  */

#define MOVE_HEAD_TO_FREELIST(OP, TYPE)			\
  do							\
    {							\
      TYPE##_optype_p next = (OP)->next;		\
      (OP)->next = free_##TYPE##s;			\
      free_##TYPE##s = (OP);				\
      (OP) = next;					\
    } while (0)


/* Initializes immediate use at USE_PTR to value VAL, and links it to the list
   of immediate uses.  STMT is the current statement.  */

#define INITIALIZE_USE(USE_PTR, VAL, STMT)		\
  do							\
    {							\
      (USE_PTR)->use = (VAL);				\
      link_imm_use_stmt ((USE_PTR), *(VAL), (STMT));	\
    } while (0)
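
/* Editor's illustration (not part of the original sources): the
   finalize_* routines below chain these macros into a merge loop.
   Keeping a matching old node and advancing both lists is simply

       MOVE_HEAD_AFTER (old_ops, last);

   while recycling a stale node is

       MOVE_HEAD_TO_FREELIST (old_ops, def);
*/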
/* Adds OP to the list of defs after LAST, and moves
   LAST to the new element.  */

static inline void
add_def_op (tree *op, def_optype_p *last)
{
  def_optype_p new;

  ALLOC_OPTYPE (new, def);
  DEF_OP_PTR (new) = op;
  APPEND_OP_AFTER (new, *last);
}
/* Adds OP to the list of uses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_use_op (tree stmt, tree *op, use_optype_p *last)
{
  use_optype_p new;

  ALLOC_OPTYPE (new, use);
  INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
  APPEND_OP_AFTER (new, *last);
}
/* Adds OP to the list of vuses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_vuse_op (tree stmt, tree op, vuse_optype_p *last)
{
  vuse_optype_p new;

  ALLOC_OPTYPE (new, vuse);
  VUSE_OP (new) = op;
  INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt);
  APPEND_OP_AFTER (new, *last);
}
/* Adds OP to the list of maydefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_maydef_op (tree stmt, tree op, maydef_optype_p *last)
{
  maydef_optype_p new;

  ALLOC_OPTYPE (new, maydef);
  MAYDEF_RESULT (new) = op;
  MAYDEF_OP (new) = op;
  INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt);
  APPEND_OP_AFTER (new, *last);
}
/* Adds OP to the list of mustdefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_mustdef_op (tree stmt, tree op, mustdef_optype_p *last)
{
  mustdef_optype_p new;

  ALLOC_OPTYPE (new, mustdef);
  MUSTDEF_RESULT (new) = op;
  MUSTDEF_KILL (new) = op;
  INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt);
  APPEND_OP_AFTER (new, *last);
}
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Given that def operands list is not necessarily sorted, merging
	   the operands this way does not make much sense.
	-- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_def_ops (tree stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  tree *old_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = DEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_defs))
    {
      tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
      old_base = DEF_OP_PTR (old_ops);

      if (old_base == new_base)
	{
	  /* if variables are the same, reuse this node.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* if old is less than new, old goes to the free list.  */
	  MOVE_HEAD_TO_FREELIST (old_ops, def);
	}
      else
	{
	  /* This is a new operand.  */
	  add_def_op (new_base, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_defs); new_i++)
    add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = free_defs;
      free_defs = old_ops;
    }

  /* Now set the stmt's operands.  */
  DEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    def_optype_p ptr;
    unsigned x = 0;
    for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_defs));
  }
#endif
}
/* This routine will create stmt operands for STMT from the def build list.  */

static void
finalize_ssa_defs (tree stmt)
{
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);

  /* If there is an old list, often the new list is identical, or close, so
     find the elements at the beginning that are the same as the vector.  */
  finalize_ssa_def_ops (stmt);
  VEC_truncate (tree, build_defs, 0);
}
/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_use_ops (tree stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  new_list.next = NULL;
  last = &new_list;

  old_ops = USE_OPS (stmt);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = free_uses;
      free_uses = old_ops;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);

  last->next = NULL;

  /* Now set the stmt's operands.  */
  USE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_uses));
  }
#endif
}
/* Return a new use operand vector for STMT, comparing to OLD_OPS_P.  */

static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to update_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
  }
#endif
  finalize_ssa_use_ops (stmt);
  VEC_truncate (tree, build_uses, 0);
}
/* Takes elements from build_v_may_defs and turns them into maydef operands of
   STMT.  */

static inline void
finalize_ssa_v_may_def_ops (tree stmt)
{
  unsigned new_i;
  struct maydef_optype_d new_list;
  maydef_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = MAYDEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_v_may_defs))
    {
      act = VEC_index (tree, build_v_may_defs, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (MAYDEF_OP (old_ops));

      if (old_base == new_base)
	{
	  /* if variables are the same, reuse this node.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  set_virtual_use_link (MAYDEF_OP_PTR (last), stmt);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* if old is less than new, old goes to the free list.  */
	  delink_imm_use (MAYDEF_OP_PTR (old_ops));
	  MOVE_HEAD_TO_FREELIST (old_ops, maydef);
	}
      else
	{
	  /* This is a new operand.  */
	  add_maydef_op (stmt, act, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_v_may_defs list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++)
    add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (MAYDEF_OP_PTR (ptr));
      old_ops->next = free_maydefs;
      free_maydefs = old_ops;
    }

  /* Now set the stmt's operands.  */
  MAYDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_v_may_defs));
  }
#endif
}
static void
finalize_ssa_v_may_defs (tree stmt)
{
  finalize_ssa_v_may_def_ops (stmt);
}
/* Clear the in_list bits and empty the build array for V_MAY_DEFs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = VEC_length (tree, build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }
  VEC_truncate (tree, build_v_may_defs, 0);
}
/* Takes elements from build_vuses and turns them into vuse operands of
   STMT.  */

static inline void
finalize_ssa_vuse_ops (tree stmt)
{
  unsigned new_i;
  struct vuse_optype_d new_list;
  vuse_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = VUSE_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_vuses))
    {
      act = VEC_index (tree, build_vuses, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (VUSE_OP (old_ops));

      if (old_base == new_base)
	{
	  /* if variables are the same, reuse this node.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  set_virtual_use_link (VUSE_OP_PTR (last), stmt);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* if old is less than new, old goes to the free list.  */
	  delink_imm_use (USE_OP_PTR (old_ops));
	  MOVE_HEAD_TO_FREELIST (old_ops, vuse);
	}
      else
	{
	  /* This is a new operand.  */
	  add_vuse_op (stmt, act, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_vuses list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
    add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (VUSE_OP_PTR (ptr));
      old_ops->next = free_vuses;
      free_vuses = old_ops;
    }

  /* Now set the stmt's operands.  */
  VUSE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_vuses));
  }
#endif
}
/* Return a new VUSE operand vector, comparing to OLD_OPS_P.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_v_may_defs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is
     not needed because V_MAY_DEFs imply a VUSE of the variable.  For
     instance, suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the
     V_MAY_DEF operation.  */
  num = VEC_length (tree, build_vuses);
  num_v_may_defs = VEC_length (tree, build_v_may_defs);

  if (num > 0 && num_v_may_defs > 0)
    {
      for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
	{
	  tree vuse;
	  vuse = VEC_index (tree, build_vuses, vuse_index);
	  if (TREE_CODE (vuse) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (vuse);
	      ann->in_vuse_list = 0;
	      if (ann->in_v_may_def_list)
		{
		  VEC_ordered_remove (tree, build_vuses, vuse_index);
		  continue;
		}
	    }
	  vuse_index++;
	}
    }
  else
    {
      /* Clear out the in_list bits.  */
      for (vuse_index = 0;
	   vuse_index < VEC_length (tree, build_vuses);
	   vuse_index++)
	{
	  tree t = VEC_index (tree, build_vuses, vuse_index);
	  if (TREE_CODE (t) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (t);
	      ann->in_vuse_list = 0;
	    }
	}
    }

  finalize_ssa_vuse_ops (stmt);

  /* The V_MAY_DEF build vector wasn't cleaned up because we needed it.  */
  cleanup_v_may_defs ();

  /* Free the VUSEs build vector.  */
  VEC_truncate (tree, build_vuses, 0);
}
/* Takes elements from build_v_must_defs and turns them into mustdef operands of
   STMT.  */

static inline void
finalize_ssa_v_must_def_ops (tree stmt)
{
  unsigned new_i;
  struct mustdef_optype_d new_list;
  mustdef_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = MUSTDEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_v_must_defs))
    {
      act = VEC_index (tree, build_v_must_defs, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (MUSTDEF_KILL (old_ops));

      if (old_base == new_base)
	{
	  /* If variables are the same, reuse this node.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* If old is less than new, old goes to the free list.  */
	  delink_imm_use (MUSTDEF_KILL_PTR (old_ops));
	  MOVE_HEAD_TO_FREELIST (old_ops, mustdef);
	}
      else
	{
	  /* This is a new operand.  */
	  add_mustdef_op (stmt, act, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_v_must_defs list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++)
    add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (MUSTDEF_KILL_PTR (ptr));
      old_ops->next = free_mustdefs;
      free_mustdefs = old_ops;
    }

  /* Now set the stmt's operands.  */
  MUSTDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_v_must_defs));
  }
#endif
}
static void
finalize_ssa_v_must_defs (tree stmt)
{
  /* In the presence of subvars, there may be more than one V_MUST_DEF
     per statement (one for each subvar).  It is a bit expensive to
     verify that all must-defs in a statement belong to subvars if
     there is more than one must-def, so we don't do it.  Suffice to
     say, if you reach here without having subvars, and have num > 1,
     you have hit a bug.  */
  finalize_ssa_v_must_def_ops (stmt);
  VEC_truncate (tree, build_v_must_defs, 0);
}
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);
}
/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
  gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree)def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree)use_p);
}
/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      /* Don't allow duplicate entries.  */
      if (ann->in_v_may_def_list)
	return;
      ann->in_v_may_def_list = 1;
    }

  VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
}
/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
	return;
      ann->in_vuse_list = 1;
    }

  VEC_safe_push (tree, heap, build_vuses, (tree)var);
}
/* Add VAR to the list of virtual must definitions for INFO.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
    if (var == VEC_index (tree, build_v_must_defs, i))
      return;

  VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
}
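
/* Editor's note (not in the original sources): append_v_may_def and
   append_vuse use the in_v_may_def_list/in_vuse_list annotation bits
   for constant-time duplicate detection, whereas append_v_must_def
   scans linearly; the V_MUST_DEF list is expected to stay tiny (at
   most one entry per subvar of the stored-to variable).  */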
/* REF is a tree that contains the entire pointer dereference
   expression, if available, or NULL otherwise.  ALIAS is the variable
   we are asking if REF can access.  OFFSET and SIZE come from the
   memory access expression that generated this virtual operand.  */

static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
			   HOST_WIDE_INT size)
{
  bool offsetgtz = offset > 0;
  unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
  tree base = ref ? get_base_address (ref) : NULL;

  /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
     using a call-clobbered memory tag.  By definition, call-clobbered
     memory tags can always touch .GLOBAL_VAR.  */
  if (alias == global_var)
    return true;

  /* If ALIAS is an SFT, it can't be touched if the offset
     and size of the access is not overlapping with the SFT offset and
     size.  This is only true if we are accessing through a pointer
     to a type that is the same as SFT_PARENT_VAR.  Otherwise, we may
     be accessing through a pointer to some substruct of the
     structure, and if we try to prune there, we will have the wrong
     offset, and get the wrong answer.
     i.e., we can't prune without more work if we have something like

     struct gcc_target
     {
       struct asm_out
       {
	 const char *byte_op;
	 struct asm_int_op
	 {
	   const char *hi;
	 } aligned_op;
       } asm_out;
     } targetm;

     foo = &targetm.asm_out.aligned_op;
     return foo->hi;

     SFT.1, which represents hi, will have SFT_OFFSET=32 because in
     terms of SFT_PARENT_VAR, that is where it is.
     However, the access through the foo pointer will be at offset 0.  */
  if (size != -1
      && TREE_CODE (alias) == STRUCT_FIELD_TAG
      && base
      && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
      && !overlap_subvar (offset, size, alias, NULL))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* Without strict aliasing, it is impossible for a component access
     through a pointer to touch a random variable, unless that
     variable *is* a structure or a pointer.

     That is, given p->c, and some random global variable b,
     there is no legal way that p->c could be an access to b.

     Without strict aliasing on, we consider it legal to do something
     like:

     struct foos { int l; };
     int foo;
     static struct foos *getfoo(void);
     int main (void)
     {
       struct foos *f = getfoo();
       f->l = 1;
       foo = 2;
       if (f->l == 1)
	 abort();
       exit(0);
     }
     static struct foos *getfoo(void)
     { return (struct foos *)&foo; }

     (taken from 20000623-1.c)

     The docs also say/imply that access through union pointers
     is legal (but *not* if you take the address of the union member,
     i.e. the inverse), such that you can do

     typedef union {
       int d;
     } U;

     int rv;
     void breakme()
     {
       U *rv0;
       U *pretmp = (U*)&rv;
       rv0 = pretmp;
       rv0->d = 42;
     }

     To implement this, we just punt on accesses through union
     pointers entirely.  */
  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && (TREE_CODE (base) != INDIRECT_REF
	       || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
	   && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
	   && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
	   && !POINTER_TYPE_P (TREE_TYPE (alias))
	   /* When the struct has may_alias attached to it, we need not to
	      return true.  */
	   && get_alias_set (base))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* If the offset of the access is greater than the size of one of
     the possible aliases, it can't be touching that alias, because it
     would be past the end of the structure.  */
  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && !POINTER_TYPE_P (TREE_TYPE (alias))
	   && offsetgtz
	   && DECL_SIZE (alias)
	   && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
	   && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  return true;
}
/* Add VAR to the virtual operands array.  FLAGS is as in
   get_expr_operands.  FULL_REF is a tree that contains the entire
   pointer dereference expression, if available, or NULL otherwise.
   OFFSET and SIZE come from the memory access expression that
   generated this virtual operand.  FOR_CLOBBER is true if this is
   adding a virtual operand for a call clobber.  */

static void
add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
		     tree full_ref, HOST_WIDE_INT offset,
		     HOST_WIDE_INT size, bool for_clobber)
{
  VEC(tree,gc) *aliases;
  tree sym;
  var_ann_t v_ann;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a V_MAY_DEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     check that this only happens on non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     modify_expr, then we can't suppress the V_MAY_DEF, lest we run
     into validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_non_specific) && unmodifiable_var_p (var))
    flags &= ~(opf_is_def | opf_kill_def);

  /* The variable is not a GIMPLE register.  Add it (or its aliases) to
     virtual operands, unless the caller has specifically requested
     not to add virtual operands (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  aliases = v_ann->may_aliases;
  if (aliases == NULL)
    {
      /* The variable is not aliased or it is an alias tag.  */
      if (flags & opf_is_def)
	{
	  if (flags & opf_kill_def)
	    {
	      /* V_MUST_DEF for non-aliased, non-GIMPLE register
		 variable definitions.  */
	      gcc_assert (!MTAG_P (var)
			  || TREE_CODE (var) == STRUCT_FIELD_TAG);
	      append_v_must_def (var);
	    }
	  else
	    {
	      /* Add a V_MAY_DEF for call-clobbered variables and
		 memory tags.  */
	      append_v_may_def (var);
	    }
	}
      else
	append_vuse (var);
    }
  else
    {
      unsigned i;
      tree al;

      /* The variable is aliased.  Add its aliases to the virtual
	 operands.  */
      gcc_assert (VEC_length (tree, aliases) != 0);

      if (flags & opf_is_def)
	{
	  bool none_added = true;

	  for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
	    {
	      if (!access_can_touch_variable (full_ref, al, offset, size))
		continue;

	      none_added = false;
	      append_v_may_def (al);
	    }

	  /* If the variable is also an alias tag, add a virtual
	     operand for it, otherwise we will miss representing
	     references to the members of the variable's alias set.
	     This fixes the bug in gcc.c-torture/execute/20020503-1.c.

	     It is also necessary to add bare defs on clobbers for
	     SMT's, so that bare SMT uses caused by pruning all the
	     aliases will link up properly with calls.  In order to
	     keep the number of these bare defs we add down to the
	     minimum necessary, we keep track of which SMT's were used
	     alone in statement vdefs or VUSEs.  */
	  if (v_ann->is_aliased
	      || none_added
	      || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
		  && for_clobber
		  && SMT_USED_ALONE (var)))
	    {
	      /* Every bare SMT def we add should have SMT_USED_ALONE
		 set on it, or else we will get the wrong answer on
		 clobbers.  */
	      if (none_added
		  && !updating_used_alone && aliases_computed_p
		  && TREE_CODE (var) == SYMBOL_MEMORY_TAG)
		gcc_assert (SMT_USED_ALONE (var));

	      append_v_may_def (var);
	    }
	}
      else
	{
	  bool none_added = true;
	  for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
	    {
	      if (!access_can_touch_variable (full_ref, al, offset, size))
		continue;
	      none_added = false;
	      append_vuse (al);
	    }

	  /* Similarly, append a virtual use for VAR itself, when
	     it is an alias tag.  */
	  if (v_ann->is_aliased || none_added)
	    append_vuse (var);
	}
    }
}
/* Add *VAR_P to the appropriate operand array for S_ANN.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  bool is_real_op;
  tree var, sym;
  var_ann_t v_ann;

  var = *var_p;
  gcc_assert (SSA_VAR_P (var));

  is_real_op = is_gimple_reg (var);

  /* If this is a real operand, the operand is either an SSA name or a
     decl.  Virtual operands may only be decls.  */
  gcc_assert (is_real_op || DECL_P (var));

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
}
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
      that got us here.

   FLAGS is as in get_expr_operands.

   FULL_REF contains the full pointer dereference expression, if we
      have it, or NULL otherwise.

   OFFSET and SIZE are the location of the access inside the
      dereferenced pointer, if known.

   RECURSE_ON_BASE should be set to true if we want to continue
      calling get_expr_operands on the base pointer, and false if
      something else will do it for us.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags,
			   tree full_ref,
			   HOST_WIDE_INT offset, HOST_WIDE_INT size,
			   bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_virtual_operand (pi->name_mem_tag, s_ann, flags,
			       full_ref, offset, size, false);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its symbol memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);

	  if (v_ann->symbol_mem_tag)
	    add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
				 full_ref, offset, size, false);
	}
    }
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      /* If a constant is used as a pointer, we can't generate a real
	 operand for it but we mark the statement volatile to prevent
	 optimizations from messing things up.  */
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }
  else
    {
      /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
      gcc_unreachable ();
    }

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr, opf_none);
}
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag = TMR_TAG (expr), ref;
  HOST_WIDE_INT offset, size, maxsize;
  subvar_t svars, sv;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);

  /* MEM_REFs should never be killing.  */
  flags &= ~opf_kill_def;

  if (TMR_SYMBOL (expr))
    {
      stmt_ann_t ann = stmt_ann (stmt);
      add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
    }

  if (!tag)
    {
      /* Something weird, so ensure that we will be careful.  */
      stmt_ann (stmt)->has_volatile_ops = true;
      return;
    }

  if (DECL_P (tag))
    {
      get_expr_operands (stmt, &tag, flags);
      return;
    }

  ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
  gcc_assert (ref != NULL_TREE);
  svars = get_subvars_for_var (ref);
  for (sv = svars; sv; sv = sv->next)
    {
      bool exact;
      if (overlap_subvar (offset, maxsize, sv->var, &exact))
	{
	  int subvar_flags = flags;
	  if (!exact || size != maxsize)
	    subvar_flags &= ~opf_kill_def;
	  add_stmt_operand (&sv->var, s_ann, subvar_flags);
	}
    }
}
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b, not_written_b;

  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_is_def);
      return;
    }

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */
  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
  /* Add a V_MAY_DEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var_lookup (u);
      unsigned int escape_mask = var_ann (var)->escape_mask;
      tree real_var = var;
      bool not_read;
      bool not_written;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b,
					    DECL_UID (real_var)) : false;
      not_written = not_written_b ? bitmap_bit_p (not_written_b,
						  DECL_UID (real_var)) : false;
      gcc_assert (!unmodifiable_var_p (var));

      clobber_stats.clobbered_vars++;

      /* See if this variable is really clobbered by this function.  */

      /* Trivial case: Things escaping only to pure/const are not
	 clobbered by non-pure-const, and only read by pure/const.  */
      if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
	{
	  tree call = get_call_expr_in (stmt);
	  if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
	    {
	      add_stmt_operand (&var, s_ann, opf_none);
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	  else
	    {
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	}

      if (not_written)
	{
	  clobber_stats.static_write_clobbers_avoided++;
	  if (!not_read)
	    add_stmt_operand (&var, s_ann, opf_none);
	  else
	    clobber_stats.static_read_clobbers_avoided++;
	}
      else
	add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
    }
}
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b;

  /* if the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_none);
      return;
    }

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      tree real_var = var;
      bool not_read;

      clobber_stats.readonly_clobbers++;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
			    : false;

      if (not_read)
	{
	  clobber_stats.static_readonly_clobbers_avoided++;
	  continue;
	}

      add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
    }
}
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or V_USE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
  if (aliases_computed_p
      && !bitmap_empty_p (call_clobbered_vars)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt, get_callee_fndecl (expr));
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, s_ann, opf_is_def);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	    }

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
	  {
	    tree var = referenced_var (i);

	    /* Subvars are explicitly represented in this list, so
	       we don't need the original to be added to the clobber
	       ops, but the original *will* be in this list because
	       we keep the addressability of the original
	       variable up-to-date so we don't screw up the rest of
	       the backend.  */
	    if (var_can_have_subvars (var)
		&& get_subvars_for_var (var) != NULL)
	      continue;

	    add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	  }

	break;
      }
}
/* Scan operands for the assignment expression EXPR in statement STMT.  */

static void
get_modify_expr_operands (tree stmt, tree expr)
{
  /* First get operands from the RHS.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

  /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
     registers.  If the LHS is a store to memory, we will either need
     a preserving definition (V_MAY_DEF) or a killing definition
     (V_MUST_DEF).

     Preserving definitions are those that modify a part of an
     aggregate object for which no subvars have been computed (or the
     reference does not correspond exactly to one of them).  Stores
     through a pointer are also represented with V_MAY_DEF operators.

     The determination of whether to use a preserving or a killing
     definition is done while scanning the LHS of the assignment.  By
     default, assume that we will emit a V_MUST_DEF.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def|opf_kill_def);
}
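
/* Editor's illustration (not part of the original sources): for GIMPLE
   registers 'a' and 'b', scanning 'a = b + 1' leaves DEF(a) in
   build_defs and USE(b) in build_uses.  For '*p = b', the LHS goes
   through get_indirect_ref_operands instead, which clears opf_kill_def,
   so the pointed-to variables end up as V_MAY_DEFs rather than a
   V_MUST_DEF.  */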
/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);

      /* If the address is invariant, there may be no interesting
	 variable references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case STRUCT_FIELD_TAG:
    case SYMBOL_MEMORY_TAG:
    case NAME_MEMORY_TAG:
      add_stmt_operand (expr_p, s_ann, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      {
	subvar_t svars;

	/* Add the subvars for a variable, if it has subvars, to DEFS
	   or USES.  Otherwise, add the variable itself.  Whether it
	   goes to USES or DEFS depends on the operand flags.  */
	if (var_can_have_subvars (expr)
	    && (svars = get_subvars_for_var (expr)))
	  {
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      add_stmt_operand (&sv->var, s_ann, flags);
	  }
	else
	  add_stmt_operand (expr_p, s_ann, flags);

	return;
      }

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree ref;
	HOST_WIDE_INT offset, size, maxsize;
	bool none = true;

	/* This component reference becomes an access to all of the
	   subvariables it can touch, if we can determine that, but
	   *NOT* the real one.  If we can't determine which fields we
	   could touch, the recursion will eventually get to a
	   variable and add *all* of its subvars, or whatever is the
	   minimum correct subset.  */
	ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
	if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
	  {
	    subvar_t sv;
	    subvar_t svars = get_subvars_for_var (ref);

	    for (sv = svars; sv; sv = sv->next)
	      {
		bool exact;

		if (overlap_subvar (offset, maxsize, sv->var, &exact))
		  {
		    int subvar_flags = flags;
		    none = false;
		    if (!exact || size != maxsize)
		      subvar_flags &= ~opf_kill_def;
		    add_stmt_operand (&sv->var, s_ann, subvar_flags);
		  }
	      }

	    if (!none)
	      flags |= opf_no_vops;
	  }
	else if (TREE_CODE (ref) == INDIRECT_REF)
	  {
	    get_indirect_ref_operands (stmt, ref, flags, expr, offset,
				       maxsize, false);
	    flags |= opf_no_vops;
	  }

	/* Even if we found subvars above we need to ensure to see
	   immediate uses for d in s.a[d].  In case of s.a having
	   a subvar we would miss it otherwise.  */
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			   flags & ~opf_kill_def);

	if (code == COMPONENT_REF)
	  {
	    if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      s_ann->has_volatile_ops = true;
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case MODIFY_EXPR:
      get_modify_expr_operands (stmt, expr);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	for (idx = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, opf_none);

	return;
      }

    case BIT_FIELD_REF:
      /* Stores using BIT_FIELD_REF are always preserving definitions.  */
      flags &= ~opf_kill_def;

      /* Fallthru  */

    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
    case CONST_DECL:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
	goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
	goto do_binary;
      if (class == tcc_constant || class == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      get_modify_expr_operands (stmt, stmt);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      /* These nodes contain no variable references.  */
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the
	 operand pointer (which may only happen for USE operands), we
	 will fail in add_stmt_operand.  This default will handle
	 statements like empty statements, or CALL_EXPRs that may
	 appear on the RHS of a statement or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }
}
/* Create an operands cache for STMT.  */

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands.  */
  if (ann)
    ann->has_volatile_ops = false;

  start_ssa_stmt_operands ();

  parse_ssa_operands (stmt);
  operand_build_sort_virtual (build_vuses);
  operand_build_sort_virtual (build_v_may_defs);
  operand_build_sort_virtual (build_v_must_defs);

  finalize_ssa_stmt_operands (stmt);
}
/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->maydef_ops = NULL;
  ops->mustdef_ops = NULL;
  ops->vuse_ops = NULL;
}
/* Get the operands of statement STMT.  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  gcc_assert (ann->modified);

  timevar_push (TV_TREE_OPS);

  build_ssa_operands (stmt);

  /* Clear the modified bit for STMT.  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
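
/* Editor's illustration (not part of the original sources): passes do
   not usually call update_stmt_operands directly.  After editing a
   statement they call update_stmt from tree-flow-inline.h, which marks
   the statement modified and triggers this routine:

       TREE_OPERAND (stmt, 1) = new_rhs;   (RHS of a MODIFY_EXPR)
       update_stmt (stmt);
*/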

/* Copies virtual operands from SRC to DEST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  tree t;
  ssa_op_iter iter, old_iter;
  use_operand_p use_p, u2;
  def_operand_p def_p, d2;

  build_ssa_operands (dest);

  /* Copy all the virtual fields.  */
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
    append_vuse (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
    append_v_may_def (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
    append_v_must_def (t);

  if (VEC_length (tree, build_vuses) == 0
      && VEC_length (tree, build_v_may_defs) == 0
      && VEC_length (tree, build_v_must_defs) == 0)
    return;

  /* Now commit the virtual operands to this stmt.  */
  finalize_ssa_v_must_defs (dest);
  finalize_ssa_v_may_defs (dest);
  finalize_ssa_vuses (dest);

  /* Finally, set the fields to the same values as the originals.  */
  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, t);
      t = op_iter_next_tree (&old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_maydef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));
}
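
/* The two phases above are deliberate: build_ssa_operands on DEST
   first creates unversioned virtual operands (the base variables),
   and the loops that follow then overwrite them in place with the
   exact SSA_NAME versions found on SRC.  DEST therefore ends up
   with, e.g., 'VUSE <x_7>' rather than 'VUSE <x>' whenever SRC had
   the versioned name.  */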

/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this
   can be used to eliminate redundant loads.  OLD_STMT is the store stmt,
   and NEW_STMT is the new load which represents a load of the values
   stored.  */

void
create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
{
  stmt_ann_t ann;
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned x;

  /* Ensure the stmt annotation exists (get_stmt_ann creates it on
     demand).  */
  ann = get_stmt_ann (new_stmt);

  /* Process the stmt looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (x = 0; x < VEC_length (tree, build_vuses); x++)
    {
      tree t = VEC_index (tree, build_vuses, x);
      if (TREE_CODE (t) != SSA_NAME)
        {
          var_ann_t ann = var_ann (t);
          ann->in_vuse_list = 0;
        }
    }

  for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
        {
          var_ann_t ann = var_ann (t);
          ann->in_v_may_def_list = 0;
        }
    }

  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_v_may_defs, 0);
  VEC_truncate (tree, build_v_must_defs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
                             (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
    append_vuse (op);

  /* Now build the operands for this new stmt.  */
  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}
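
/* As a sketch (hypothetical GIMPLE), given the store

     # a_8 = V_MAY_DEF <a_7>
     *p_1 = x_3;

   the artificial load built from it receives 'VUSE <a_8>', allowing
   DOM to recognize a subsequent load of '*p_1' as redundant.  The
   final delink_imm_use loop matters because this stmt never becomes
   part of the IL; leaving its uses on the immediate use lists would
   corrupt them.  */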

/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */

void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;

      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp0)
          {
            use0 = ptr;
            break;
          }

      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp1)
          {
            use1 = ptr;
            break;
          }

      /* If we didn't find both operands in the cache, there isn't much
         we can do at this point.  Presumably we don't need to worry
         about it.  */
      if (use0 && use1)
        {
          tree *tmp = USE_OP_PTR (use1)->use;
          USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
          USE_OP_PTR (use0)->use = tmp;
        }
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
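
/* A typical caller swaps the two operands of a commutative RHS in
   place, e.g. (sketch):

     tree rhs = TREE_OPERAND (stmt, 1);
     swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
                         &TREE_OPERAND (rhs, 1));

   Because EXP0 and EXP1 are the very pointers cached in the use
   operands, swapping the cached '->use' fields first means each use
   node keeps its position in its immediate use list after the trees
   are exchanged.  */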

/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
   a single variable whose address has been taken or any other valid
   GIMPLE memory reference (structure reference, array, etc).  If the
   base address of REF is a decl that has sub-variables, also add all
   of its sub-variables.  */

void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
  tree var;
  subvar_t svars;

  gcc_assert (addresses_taken);

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*addresses_taken == NULL)
        *addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
          && (svars = get_subvars_for_var (var)))
        {
          subvar_t sv;
          for (sv = svars; sv; sv = sv->next)
            {
              bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
              TREE_ADDRESSABLE (sv->var) = 1;
            }
        }
      else
        {
          bitmap_set_bit (*addresses_taken, DECL_UID (var));
          TREE_ADDRESSABLE (var) = 1;
        }
    }
}
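
/* For example, when REF is the structure field reference 's.f', the
   UID added to *ADDRESSES_TAKEN is that of the base 's' itself (or of
   all of 's's sub-variables, if it has been decomposed), not just the
   field 'f': as the comment above explains, the field's address can
   reach the whole structure via pointer arithmetic.  */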

/* Scan the immediate_use list for VAR, making sure it is linked
   properly.  Return TRUE if there is a problem and emit an error
   message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
         problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
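
/* The invariants checked above: the immediate use list is a circular,
   doubly-linked list rooted at the guard node embedded in the
   SSA_NAME (recognizable by its NULL 'use' pointer); every other
   node's 'use' pointer must dereference back to VAR; and the forward
   and backward traversals must agree on the element count.  */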

/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
        print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
      else
        print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }
  fprintf (file, "\n");
}

/* Dump the immediate uses of every SSA name to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}

/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}

/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

2571 #include "gt-tree-ssa-operands.h"