/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "errors.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"

#include "langhooks.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
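
/* As an illustrative sketch only (not used by the compiler): a pass is
   expected to consume this cache through the operand iterators from
   tree-flow.h rather than by re-walking the trees itself.  A walk over
   every use (real and virtual) of a stmt might look like this:  */
#if 0
static void
example_walk_uses (tree stmt, FILE *file)
{
  tree use;
  ssa_op_iter iter;

  /* SSA_OP_ALL_USES visits USEs, VUSEs and the use half of V_MAY_DEFs.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
    print_generic_expr (file, use, 0);
}
#endif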

/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def	(1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 2)
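
/* For example (illustrative): when parse_ssa_operands below scans the LHS
   of a whole-variable assignment it passes (opf_is_def | opf_kill_def),
   while a partial store such as a.b = x passes only opf_is_def, and the
   index 'i' in &a.b[i] is scanned with opf_no_vops set.  */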

/* This structure maintains a sorted list of operands which is created by
   parse_ssa_operands.  */
struct opbuild_list_d GTY (())
{
  varray_type vars;	/* The VAR_DECLs tree.  */
  varray_type uid;	/* The sort value for virtual symbols.  */
  varray_type next;	/* The next index in the sorted list.  */
  int first;		/* First element in list.  */
  unsigned num;		/* Number of elements.  */
};

#define OPBUILD_LAST	-1
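
/* Purely illustrative sketch (not part of the compiler): the NEXT varray
   threads a singly linked list through the vectors, so following FIRST and
   then the NEXT links visits elements in sorted order even though they were
   pushed unordered.  Using the opbuild_* accessors defined below:  */
#if 0
static void
example_walk_opbuild (struct opbuild_list_d *list)
{
  int i;

  /* Chase the NEXT links rather than array order; for virtual lists this
     yields ascending DECL_UID order.  */
  for (i = opbuild_first (list);
       i != OPBUILD_LAST;
       i = opbuild_next (list, i))
    print_generic_expr (stderr, opbuild_elem_virtual (list, i), 0);
}
#endif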

/* Array for building all the def operands.  */
static GTY (()) struct opbuild_list_d build_defs;

/* Array for building all the use operands.  */
static GTY (()) struct opbuild_list_d build_uses;

/* Array for building all the v_may_def operands.  */
static GTY (()) struct opbuild_list_d build_v_may_defs;

/* Array for building all the vuse operands.  */
static GTY (()) struct opbuild_list_d build_vuses;

/* Array for building all the v_must_def operands.  */
static GTY (()) struct opbuild_list_d build_v_must_defs;

/* True if the operands for call clobbered vars are cached and valid.  */
bool ssa_call_clobbered_cache_valid;
bool ssa_ro_call_cache_valid;

/* These arrays are the cached operand vectors for call clobbered calls.  */
static VEC(tree,heap) *clobbered_v_may_defs;
static VEC(tree,heap) *clobbered_vuses;
static VEC(tree,heap) *ro_call_vuses;
static bool clobbered_aliased_loads;
static bool clobbered_aliased_stores;
static bool ro_call_aliased_loads;
static bool ops_active = false;

static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
static unsigned operand_memory_index;

static void note_addressable (tree, stmt_ann_t);
static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
static void append_v_may_def (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree);
static void add_call_read_ops (tree);
static void add_stmt_operand (tree *, stmt_ann_t, int);
static void build_ssa_operands (tree stmt);

static def_optype_p free_defs = NULL;
static use_optype_p free_uses = NULL;
static vuse_optype_p free_vuses = NULL;
static maydef_optype_p free_maydefs = NULL;
static mustdef_optype_p free_mustdefs = NULL;

/* Initialize a virtual operand build LIST called NAME with NUM elements.  */

static inline void
opbuild_initialize_virtual (struct opbuild_list_d *list, int num,
			    const char *name)
{
  list->first = OPBUILD_LAST;
  list->num = 0;
  VARRAY_TREE_INIT (list->vars, num, name);
  VARRAY_UINT_INIT (list->uid, num, "List UID");
  VARRAY_INT_INIT (list->next, num, "List NEXT");
}


/* Initialize a real operand build LIST called NAME with NUM elements.  */

static inline void
opbuild_initialize_real (struct opbuild_list_d *list, int num, const char *name)
{
  list->first = OPBUILD_LAST;
  list->num = 0;
  VARRAY_TREE_PTR_INIT (list->vars, num, name);
  VARRAY_INT_INIT (list->next, num, "List NEXT");
  /* The UID field is not needed since we sort based on the pointer value.  */
  list->uid = NULL;
}


/* Free memory used in virtual operand build object LIST.  */

static inline void
opbuild_free (struct opbuild_list_d *list)
{
  list->vars = NULL;
  list->uid = NULL;
  list->next = NULL;
}


/* Number of elements in an opbuild list.  */

static inline unsigned
opbuild_num_elems (struct opbuild_list_d *list)
{
  return list->num;
}

/* Add VAR to the real operand list LIST, keeping it sorted and avoiding
   duplicates.  The actual sort value is the tree pointer value.  */

static inline void
opbuild_append_real (struct opbuild_list_d *list, tree *var)
{
  int index;

#ifdef ENABLE_CHECKING
  /* Ensure the real operand doesn't exist already.  */
  for (index = list->first;
       index != OPBUILD_LAST;
       index = VARRAY_INT (list->next, index))
    gcc_assert (VARRAY_TREE_PTR (list->vars, index) != var);
#endif

  /* First item in the list.  */
  index = VARRAY_ACTIVE_SIZE (list->vars);
  if (index == 0)
    list->first = index;
  else
    VARRAY_INT (list->next, index - 1) = index;
  VARRAY_PUSH_INT (list->next, OPBUILD_LAST);
  VARRAY_PUSH_TREE_PTR (list->vars, var);
  list->num++;
}

/* Add VAR to the virtual operand list LIST, keeping it sorted and avoiding
   duplicates.  The actual sort value is the DECL UID of the base variable.  */

static inline void
opbuild_append_virtual (struct opbuild_list_d *list, tree var)
{
  int index, curr, last;
  unsigned int var_uid;

  if (TREE_CODE (var) != SSA_NAME)
    var_uid = DECL_UID (var);
  else
    var_uid = DECL_UID (SSA_NAME_VAR (var));

  index = VARRAY_ACTIVE_SIZE (list->vars);

  if (index == 0)
    {
      VARRAY_PUSH_TREE (list->vars, var);
      VARRAY_PUSH_UINT (list->uid, var_uid);
      VARRAY_PUSH_INT (list->next, OPBUILD_LAST);
      list->first = 0;
      list->num = 1;
      return;
    }

  last = OPBUILD_LAST;
  /* Find the correct spot in the sorted list.  */
  for (curr = list->first;
       curr != OPBUILD_LAST;
       last = curr, curr = VARRAY_INT (list->next, curr))
    if (VARRAY_UINT (list->uid, curr) > var_uid)
      break;

  if (last == OPBUILD_LAST)
    {
      /* First item in the list.  */
      VARRAY_PUSH_INT (list->next, list->first);
      list->first = index;
    }
  else
    {
      /* Don't enter duplicates at all.  */
      if (VARRAY_UINT (list->uid, last) == var_uid)
	return;

      VARRAY_PUSH_INT (list->next, VARRAY_INT (list->next, last));
      VARRAY_INT (list->next, last) = index;
    }
  VARRAY_PUSH_TREE (list->vars, var);
  VARRAY_PUSH_UINT (list->uid, var_uid);
  list->num++;
}

/* Return the first element index in LIST.  OPBUILD_LAST means there are no
   more elements.  */

static inline int
opbuild_first (struct opbuild_list_d *list)
{
  if (list->num > 0)
    return list->first;
  else
    return OPBUILD_LAST;
}


/* Return the next element after PREV in LIST.  */

static inline int
opbuild_next (struct opbuild_list_d *list, int prev)
{
  return VARRAY_INT (list->next, prev);
}


/* Return the real element at index ELEM in LIST.  */

static inline tree *
opbuild_elem_real (struct opbuild_list_d *list, int elem)
{
  return VARRAY_TREE_PTR (list->vars, elem);
}


/* Return the virtual element at index ELEM in LIST.  */

static inline tree
opbuild_elem_virtual (struct opbuild_list_d *list, int elem)
{
  return VARRAY_TREE (list->vars, elem);
}


/* Return the virtual element uid at index ELEM in LIST.  */

static inline unsigned int
opbuild_elem_uid (struct opbuild_list_d *list, int elem)
{
  return VARRAY_UINT (list->uid, elem);
}


/* Reset an operand build list.  */

static inline void
opbuild_clear (struct opbuild_list_d *list)
{
  list->first = OPBUILD_LAST;
  VARRAY_POP_ALL (list->vars);
  VARRAY_POP_ALL (list->next);
  if (list->uid)
    VARRAY_POP_ALL (list->uid);
  list->num = 0;
}


/* Remove ELEM from LIST where PREV is the previous element.  Return the next
   element.  */

static inline int
opbuild_remove_elem (struct opbuild_list_d *list, int elem, int prev)
{
  int ret;
  if (prev != OPBUILD_LAST)
    {
      gcc_assert (VARRAY_INT (list->next, prev) == elem);
      ret = VARRAY_INT (list->next, prev) = VARRAY_INT (list->next, elem);
    }
  else
    {
      gcc_assert (list->first == elem);
      ret = list->first = VARRAY_INT (list->next, elem);
    }
  list->num--;
  return ret;
}

/* Return true if the ssa operands cache is active.  */

bool
ssa_operands_active (void)
{
  return ops_active;
}


/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  opbuild_initialize_real (&build_defs, 5, "build defs");
  opbuild_initialize_real (&build_uses, 10, "build uses");
  opbuild_initialize_virtual (&build_vuses, 25, "build_vuses");
  opbuild_initialize_virtual (&build_v_may_defs, 25, "build_v_may_defs");
  opbuild_initialize_virtual (&build_v_must_defs, 25, "build_v_must_defs");
  gcc_assert (operand_memory == NULL);
  operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
  ops_active = true;
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  opbuild_free (&build_defs);
  opbuild_free (&build_uses);
  opbuild_free (&build_v_must_defs);
  opbuild_free (&build_v_may_defs);
  opbuild_free (&build_vuses);
  free_defs = NULL;
  free_uses = NULL;
  free_vuses = NULL;
  free_maydefs = NULL;
  free_mustdefs = NULL;
  while ((ptr = operand_memory) != NULL)
    {
      operand_memory = operand_memory->next;
      ggc_free (ptr);
    }

  VEC_free (tree, heap, clobbered_v_may_defs);
  VEC_free (tree, heap, clobbered_vuses);
  VEC_free (tree, heap, ro_call_vuses);
  ops_active = false;
}


/* Return a chunk of SIZE bytes of memory for operands.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;
  if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
    {
      struct ssa_operand_memory_d *ptr;
      ptr = ggc_alloc (sizeof (struct ssa_operand_memory_d));
      ptr->next = operand_memory;
      operand_memory = ptr;
      operand_memory_index = 0;
    }
  ptr = &(operand_memory->mem[operand_memory_index]);
  operand_memory_index += size;
  return ptr;
}
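
/* In effect this is a simple bump allocator over GGC-allocated chunks:
   allocations only move operand_memory_index forward, and the memory is
   reclaimed en masse when fini_ssa_operands frees the chunk chain.  */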

/* Make sure PTR is in the correct immediate use list.  Since uses are simply
   pointers into the stmt TREE, there is no way of telling if anyone has
   changed what this pointer points to via TREE_OPERAND (exp, 0) = <...>.
   The contents are different, but the pointer is still the same.  This
   routine will check to make sure PTR is in the correct list, and if it
   isn't, put it in the correct list.  We cannot simply check the previous
   node because all nodes in the same stmt might have been changed.  */

static inline void
correct_use_link (use_operand_p ptr, tree stmt)
{
  use_operand_p prev;
  tree root;

  /* fold_stmt () may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  prev = ptr->prev;
  if (prev)
    {
      bool stmt_mod = true;
      /* Find the first element which isn't a SAFE iterator, is in a different
	 stmt, and is not a modified stmt.  That node is in the correct list,
	 see if we are too.  */
      while (stmt_mod)
	{
	  while (prev->stmt == stmt || prev->stmt == NULL)
	    prev = prev->prev;
	  if (prev->use == NULL)
	    stmt_mod = false;
	  else
	    if ((stmt_mod = stmt_modified_p (prev->stmt)))
	      prev = prev->prev;
	}

      /* Get the ssa_name of the list the node is in.  */
      if (prev->use == NULL)
	root = prev->stmt;
      else
	root = *(prev->use);
      /* If it's the right list, simply return.  */
      if (root == *(ptr->use))
	return;
    }
  /* It is in the wrong list if we reach here.  */
  delink_imm_use (ptr);
  link_imm_use (ptr, *(ptr->use));
}

#define FINALIZE_OPBUILD		build_defs
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_real (&build_defs, (I))
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_real (&build_defs, (I))
#define FINALIZE_FUNC			finalize_ssa_def_ops
#define FINALIZE_ALLOC			alloc_def
#define FINALIZE_FREE			free_defs
#define FINALIZE_TYPE			struct def_optype_d
#define FINALIZE_ELEM(PTR)		((PTR)->def_ptr)
#define FINALIZE_OPS			DEF_OPS
#define FINALIZE_BASE(VAR)		VAR
#define FINALIZE_BASE_TYPE		tree *
#define FINALIZE_BASE_ZERO		NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)	FINALIZE_ELEM (PTR) = (VAL)
#include "tree-ssa-opfinalize.h"
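
/* The defines above parameterize the generic finalize routine in
   tree-ssa-opfinalize.h, which #undefs them after use so the block can be
   repeated for each operand type.  A minimal sketch of the same
   preprocessor-template technique, with hypothetical names, purely for
   illustration:  */
#if 0
/* A hypothetical "maximum-body.h" would contain:

     static TEMPLATE_TYPE
     TEMPLATE_NAME (TEMPLATE_TYPE a, TEMPLATE_TYPE b)
     {
       return a > b ? a : b;
     }
     #undef TEMPLATE_TYPE
     #undef TEMPLATE_NAME  */

#define TEMPLATE_TYPE int
#define TEMPLATE_NAME max_int
#include "maximum-body.h"

#define TEMPLATE_TYPE double
#define TEMPLATE_NAME max_double
#include "maximum-body.h"
#endif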

/* This routine will create stmt operands for STMT from the def build list.  */

static void
finalize_ssa_defs (tree stmt)
{
  unsigned int num = opbuild_num_elems (&build_defs);
  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);

  /* If there is an old list, often the new list is identical, or close, so
     find the elements at the beginning that are the same as the vector.  */

  finalize_ssa_def_ops (stmt);
  opbuild_clear (&build_defs);
}

#define FINALIZE_OPBUILD		build_uses
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_real (&build_uses, (I))
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_real (&build_uses, (I))
#define FINALIZE_FUNC			finalize_ssa_use_ops
#define FINALIZE_ALLOC			alloc_use
#define FINALIZE_FREE			free_uses
#define FINALIZE_TYPE			struct use_optype_d
#define FINALIZE_ELEM(PTR)		((PTR)->use_ptr.use)
#define FINALIZE_OPS			USE_OPS
#define FINALIZE_USE_PTR(PTR)		USE_OP_PTR (PTR)
#define FINALIZE_BASE(VAR)		VAR
#define FINALIZE_BASE_TYPE		tree *
#define FINALIZE_BASE_ZERO		NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)				\
				(PTR)->use_ptr.use = (VAL);		\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   *(VAL), (STMT))
#include "tree-ssa-opfinalize.h"

/* Build a new use operand vector for STMT from the use build list.  */

static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = opbuild_num_elems (&build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to get_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*(opbuild_elem_real (&build_uses, x)) != stmt);
  }
#endif
  finalize_ssa_use_ops (stmt);
  opbuild_clear (&build_uses);
}

/* Build a new v_may_def operand vector for STMT from the build list.  */

#define FINALIZE_OPBUILD		build_v_may_defs
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_virtual (&build_v_may_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_uid (&build_v_may_defs, (I))
#define FINALIZE_FUNC			finalize_ssa_v_may_def_ops
#define FINALIZE_ALLOC			alloc_maydef
#define FINALIZE_FREE			free_maydefs
#define FINALIZE_TYPE			struct maydef_optype_d
#define FINALIZE_ELEM(PTR)		MAYDEF_RESULT (PTR)
#define FINALIZE_OPS			MAYDEF_OPS
#define FINALIZE_USE_PTR(PTR)		MAYDEF_OP_PTR (PTR)
#define FINALIZE_BASE_ZERO		0
#define FINALIZE_BASE(VAR)		((TREE_CODE (VAR) == SSA_NAME)	\
			? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR)))
#define FINALIZE_BASE_TYPE		unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)				\
				(PTR)->def_var = (VAL);			\
				(PTR)->use_var = (VAL);			\
				(PTR)->use_ptr.use = &((PTR)->use_var);	\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"


static void
finalize_ssa_v_may_defs (tree stmt)
{
  finalize_ssa_v_may_def_ops (stmt);
}


/* Clear the in_list bits and empty the build array for v_may_defs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = opbuild_num_elems (&build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = opbuild_elem_virtual (&build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }
  opbuild_clear (&build_v_may_defs);
}

#define FINALIZE_OPBUILD		build_vuses
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_virtual (&build_vuses, (I))
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_uid (&build_vuses, (I))
#define FINALIZE_FUNC			finalize_ssa_vuse_ops
#define FINALIZE_ALLOC			alloc_vuse
#define FINALIZE_FREE			free_vuses
#define FINALIZE_TYPE			struct vuse_optype_d
#define FINALIZE_ELEM(PTR)		VUSE_OP (PTR)
#define FINALIZE_OPS			VUSE_OPS
#define FINALIZE_USE_PTR(PTR)		VUSE_OP_PTR (PTR)
#define FINALIZE_BASE_ZERO		0
#define FINALIZE_BASE(VAR)		((TREE_CODE (VAR) == SSA_NAME)	\
			? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR)))
#define FINALIZE_BASE_TYPE		unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)				\
				(PTR)->use_var = (VAL);			\
				(PTR)->use_ptr.use = &((PTR)->use_var);	\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"


/* Build the VUSE operand vector for STMT, pruning VUSEs that are already
   implied by a V_MAY_DEF of the same variable.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_v_may_defs;
  int vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  num = opbuild_num_elems (&build_vuses);
  num_v_may_defs = opbuild_num_elems (&build_v_may_defs);

  if (num > 0 && num_v_may_defs > 0)
    {
      int last = OPBUILD_LAST;
      vuse_index = opbuild_first (&build_vuses);
      for ( ; vuse_index != OPBUILD_LAST; )
	{
	  tree vuse;
	  vuse = opbuild_elem_virtual (&build_vuses, vuse_index);
	  if (TREE_CODE (vuse) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (vuse);
	      ann->in_vuse_list = 0;
	      if (ann->in_v_may_def_list)
		{
		  vuse_index = opbuild_remove_elem (&build_vuses, vuse_index,
						    last);
		  continue;
		}
	    }
	  last = vuse_index;
	  vuse_index = opbuild_next (&build_vuses, vuse_index);
	}
    }
  else
    /* Clear out the in_list bits.  */
    for (vuse_index = opbuild_first (&build_vuses);
	 vuse_index != OPBUILD_LAST;
	 vuse_index = opbuild_next (&build_vuses, vuse_index))
      {
	tree t = opbuild_elem_virtual (&build_vuses, vuse_index);
	if (TREE_CODE (t) != SSA_NAME)
	  {
	    var_ann_t ann = var_ann (t);
	    ann->in_vuse_list = 0;
	  }
      }

  finalize_ssa_vuse_ops (stmt);
  /* The v_may_def build vector wasn't freed yet because we needed it
     here.  */
  cleanup_v_may_defs ();

  /* Free the vuses build vector.  */
  opbuild_clear (&build_vuses);
}

/* Build a new v_must_def operand vector for STMT from the build list.  */

#define FINALIZE_OPBUILD		build_v_must_defs
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_virtual (&build_v_must_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_uid (&build_v_must_defs, (I))
#define FINALIZE_FUNC			finalize_ssa_v_must_def_ops
#define FINALIZE_ALLOC			alloc_mustdef
#define FINALIZE_FREE			free_mustdefs
#define FINALIZE_TYPE			struct mustdef_optype_d
#define FINALIZE_ELEM(PTR)		MUSTDEF_RESULT (PTR)
#define FINALIZE_OPS			MUSTDEF_OPS
#define FINALIZE_USE_PTR(PTR)		MUSTDEF_KILL_PTR (PTR)
#define FINALIZE_BASE_ZERO		0
#define FINALIZE_BASE(VAR)		((TREE_CODE (VAR) == SSA_NAME)	\
			? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR)))
#define FINALIZE_BASE_TYPE		unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)				\
				(PTR)->def_var = (VAL);			\
				(PTR)->kill_var = (VAL);		\
				(PTR)->use_ptr.use = &((PTR)->kill_var);\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"


static void
finalize_ssa_v_must_defs (tree stmt)
{
  /* In the presence of subvars, there may be more than one V_MUST_DEF per
     statement (one for each subvar).  It is a bit expensive to verify that
     all must-defs in a statement belong to subvars if there is more than one
     must-def, so we don't do it.  Suffice it to say, if you reach here
     without having subvars, and have num > 1, you have hit a bug.  */

  finalize_ssa_v_must_def_ops (stmt);
  opbuild_clear (&build_v_must_defs);
}

/* Finalize all the build vectors and attach the results to STMT.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);
}


/* Start the process of building up operand vectors; all the build
   vectors must be empty.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (opbuild_num_elems (&build_defs) == 0);
  gcc_assert (opbuild_num_elems (&build_uses) == 0);
  gcc_assert (opbuild_num_elems (&build_vuses) == 0);
  gcc_assert (opbuild_num_elems (&build_v_may_defs) == 0);
  gcc_assert (opbuild_num_elems (&build_v_must_defs) == 0);
}

/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  opbuild_append_real (&build_defs, def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  opbuild_append_real (&build_uses, use_p);
}


/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      /* Don't allow duplicate entries.  */
      if (ann->in_v_may_def_list)
	return;
      ann->in_v_may_def_list = 1;
    }

  opbuild_append_virtual (&build_v_may_defs, var);
}


/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
	return;
      ann->in_vuse_list = 1;
    }

  opbuild_append_virtual (&build_vuses, var);
}


/* Add VAR to the list of virtual must definitions.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < opbuild_num_elems (&build_v_must_defs); i++)
    if (var == opbuild_elem_virtual (&build_v_must_defs, i))
      return;

  opbuild_append_virtual (&build_v_must_defs, var);
}
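
/* Note the asymmetry with append_v_may_def and append_vuse above: those
   use a per-variable "in list" bit to reject duplicates in constant time,
   while V_MUST_DEFs just do a linear scan, since (per
   finalize_ssa_v_must_defs) there is usually at most one V_MUST_DEF per
   statement.  */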

/* Parse STMT looking for operands.  When finished, the various build_*
   operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      /* First get operands from the RHS.  For the LHS, we use a V_MAY_DEF if
	 either only part of LHS is modified or if the RHS might throw,
	 otherwise, use V_MUST_DEF.

	 ??? If it might throw, we should represent somehow that it is killed
	 on the fallthrough path.  */
      {
	tree lhs = TREE_OPERAND (stmt, 0);
	int lhs_flags = opf_is_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);

	/* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
	   or not the entire LHS is modified; that depends on what's
	   inside the VIEW_CONVERT_EXPR.  */
	if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
	  lhs = TREE_OPERAND (lhs, 0);

	if (TREE_CODE (lhs) != ARRAY_REF && TREE_CODE (lhs) != ARRAY_RANGE_REF
	    && TREE_CODE (lhs) != BIT_FIELD_REF
	    && TREE_CODE (lhs) != REALPART_EXPR
	    && TREE_CODE (lhs) != IMAGPART_EXPR)
	  lhs_flags |= opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
      }
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

      /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
	 pointer (which may only happen for USE operands), we will fail in
	 append_use.  This default will handle statements like empty
	 statements, or CALL_EXPRs that may appear on the RHS of a statement
	 or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }
}

/* Build the operand cache for STMT.  The annotation flags describing
   volatile operands and aliased loads/stores are reset here and then
   recomputed while the statement is parsed.  */

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands, nor
     makes aliased loads or stores.  */
  if (ann)
    {
      ann->has_volatile_ops = false;
      ann->makes_aliased_stores = false;
      ann->makes_aliased_loads = false;
    }

  start_ssa_stmt_operands ();

  parse_ssa_operands (stmt);

  finalize_ssa_stmt_operands (stmt);
}

/* Free any operand vectors in OPS.  */
#if 0
static void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->maydef_ops = NULL;
  ops->mustdef_ops = NULL;
  ops->vuse_ops = NULL;
  while (ops->memory.next != NULL)
    {
      operand_memory_p tmp = ops->memory.next;
      ops->memory.next = tmp->next;
      ggc_free (tmp);
    }
}
#endif

/* Get the operands of statement STMT.  Note that repeated calls to
   get_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to mark_stmt_modified ().  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);
  /* If get_stmt_operands is called before SSA is initialized, don't
     do anything.  */
  if (!ssa_operands_active ())
    return;
  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  gcc_assert (ann->modified);

  timevar_push (TV_TREE_OPS);

  build_ssa_operands (stmt);

  /* Clear the modified bit for STMT.  Subsequent calls to
     get_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to mark_stmt_modified ().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
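
/* Illustrative sketch only: a typical caller mutates a statement, marks it
   modified, and then has the operands recomputed, e.g. via update_stmt ()
   from tree-flow-inline.h (assumed here), which funnels into
   update_stmt_operands above.  */
#if 0
static void
example_replace_rhs (tree stmt, tree new_rhs)
{
  TREE_OPERAND (stmt, 1) = new_rhs;  /* Rewrite the MODIFY_EXPR's RHS.  */
  mark_stmt_modified (stmt);         /* Invalidate the operand cache.  */
  update_stmt (stmt);                /* Recompute the operand vectors.  */
}
#endif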

/* Copies virtual operands from SRC to DEST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  tree t;
  ssa_op_iter iter, old_iter;
  use_operand_p use_p, u2;
  def_operand_p def_p, d2;

  build_ssa_operands (dest);

  /* Copy all the virtual fields.  */
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
    append_vuse (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
    append_v_may_def (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
    append_v_must_def (t);

  if (opbuild_num_elems (&build_vuses) == 0
      && opbuild_num_elems (&build_v_may_defs) == 0
      && opbuild_num_elems (&build_v_must_defs) == 0)
    return;

  /* Now commit the virtual operands to this stmt.  */
  finalize_ssa_v_must_defs (dest);
  finalize_ssa_v_may_defs (dest);
  finalize_ssa_vuses (dest);

  /* Finally, set the fields to the same values as the originals.  */
  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, t);
      t = op_iter_next_tree (&old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_maydef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));
}

/* Specifically for use in DOM's expression analysis.  Given a store
   OLD_STMT, create an artificial stmt NEW_STMT which looks like a load of
   the values stored; it can then be used to eliminate redundant loads.  */

void
create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
{
  stmt_ann_t ann;
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned x;

  ann = get_stmt_ann (new_stmt);

  /* Process the stmt looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (x = 0; x < opbuild_num_elems (&build_vuses); x++)
    {
      tree t = opbuild_elem_virtual (&build_vuses, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_vuse_list = 0;
	}
    }

  for (x = 0; x < opbuild_num_elems (&build_v_may_defs); x++)
    {
      tree t = opbuild_elem_virtual (&build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }

  /* Remove any virtual operands that were found.  */
  opbuild_clear (&build_v_may_defs);
  opbuild_clear (&build_v_must_defs);
  opbuild_clear (&build_vuses);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
			     (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
    append_vuse (op);

  /* Now build the operands for this new stmt.  */
  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}

/* Swap the operands pointed to by EXP0 and EXP1 in STMT, keeping the
   operand cache consistent.  */

static void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }
      /* If both uses don't have operand entries, there isn't much we can do
	 at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
	{
	  tree *tmp = USE_OP_PTR (use1)->use;
	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
	  USE_OP_PTR (use0)->use = tmp;
	}
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}

/* Recursively scan the expression pointed to by EXPR_P in statement STMT.
   FLAGS is one of the OPF_* constants modifying how to interpret the
   operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* We could have the address of a component, array member,
	 etc which has interesting variable references.  */
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the stmt takes its address will be
	 of interest to some passes (e.g. alias resolution).  */
      add_stmt_operand (expr_p, s_ann, 0);

      /* If the address is invariant, there may be no interesting variable
	 references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* There should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find here
	 are ARRAY_REF indices which will always be real operands (GIMPLE
	 does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case CONST_DECL:
      {
	subvar_t svars;

	/* Add the subvars for a variable if it has subvars, to DEFS or USES.
	   Otherwise, add the variable itself.
	   Whether it goes to USES or DEFS depends on the operand flags.  */
	if (var_can_have_subvars (expr)
	    && (svars = get_subvars_for_var (expr)))
	  {
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      add_stmt_operand (&sv->var, s_ann, flags);
	  }
	else
	  {
	    add_stmt_operand (expr_p, s_ann, flags);
	  }
      }
      return;

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
	 representing the array.  The virtual variable for an ARRAY_REF
	 is the VAR_DECL for the array.  */

      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
	 according to FLAGS.  Recurse if the LHS of the ARRAY_REF node is
	 not a regular variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, s_ann, flags);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
      return;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree ref;
	HOST_WIDE_INT offset, size;
	/* This component ref becomes an access to all of the subvariables
	   it can touch, if we can determine that, but *NOT* the real one.
	   If we can't determine which fields we could touch, the recursion
	   will eventually get to a variable and add *all* of its subvars, or
	   whatever is the minimum correct subset.  */

	ref = okay_component_ref_for_subvars (expr, &offset, &size);
	if (ref)
	  {
	    subvar_t svars = get_subvars_for_var (ref);
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      {
		bool exact;
		if (overlap_subvar (offset, size, sv, &exact))
		  {
		    /* A partial overlap can never be a killing def.  */
		    if (!exact)
		      flags &= ~opf_kill_def;
		    add_stmt_operand (&sv->var, s_ann, flags);
		  }
	      }
	  }
	else
	  get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			     flags & ~opf_kill_def);

	if (code == COMPONENT_REF)
	  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case MODIFY_EXPR:
      {
	int subflags;
	tree op;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

	op = TREE_OPERAND (expr, 0);
	if (TREE_CODE (op) == WITH_SIZE_EXPR)
	  op = TREE_OPERAND (op, 0);
	if (TREE_CODE (op) == ARRAY_REF
	    || TREE_CODE (op) == ARRAY_RANGE_REF
	    || TREE_CODE (op) == REALPART_EXPR
	    || TREE_CODE (op) == IMAGPART_EXPR)
	  subflags = opf_is_def;
	else
	  subflags = opf_is_def | opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
	return;
      }

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */

	tree t;
	for (t = TREE_OPERAND (expr, 0); t ; t = TREE_CHAIN (t))
	  get_expr_operands (stmt, &TREE_VALUE (t), opf_none);

	return;
      }

    case TRUTH_NOT_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	tree op0 = TREE_OPERAND (expr, 0);
	tree op1 = TREE_OPERAND (expr, 1);

	/* If it would be profitable to swap the operands, then do so to
	   canonicalize the statement, enabling better optimization.

	   By placing canonicalization of such expressions here we
	   transparently keep statements in canonical form, even
	   when the statement is modified.  */
	if (tree_swap_operands_p (op0, op1, false))
	  {
	    /* For relationals we need to swap the operands
	       and change the code.  */
	    if (code == LT_EXPR
		|| code == GT_EXPR
		|| code == LE_EXPR
		|| code == GE_EXPR)
	      {
		TREE_SET_CODE (expr, swap_tree_comparison (code));
		swap_tree_operands (stmt,
				    &TREE_OPERAND (expr, 0),
				    &TREE_OPERAND (expr, 1));
	      }
	    /* For a commutative operator we can just swap the operands.  */
	    else if (commutative_tree_code (code))
	      {
		swap_tree_operands (stmt,
				    &TREE_OPERAND (expr, 0),
				    &TREE_OPERAND (expr, 1));
	      }
	  }

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
	goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
	goto do_binary;
      if (class == tcc_constant || class == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  internal_error ("internal error");
#endif
  gcc_unreachable ();
}

/* Scan operands in the ASM_EXPR STMT.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
			       &allows_mem, &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }


  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, s_ann, opf_is_def);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, s_ann, opf_is_def);
	    }

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);

	      /* Subvars are explicitly represented in this list, so
		 we don't need the original to be added to the clobber
		 ops, but the original *will* be in this list because
		 we keep the addressability of the original
		 variable up-to-date so we don't screw up the rest of
		 the backend.  */
	      if (var_can_have_subvars (var)
		  && get_subvars_for_var (var) != NULL)
		continue;

	      add_stmt_operand (&var, s_ann, opf_is_def);
	    }

	break;
      }
}

/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its type memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);
	  if (v_ann->type_mem_tag)
	    add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
	}
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }

  /* Everything else *should* have been folded elsewhere, but users
     are smarter than we are at finding ways to write invalid code.  We
     cannot just assert here.  If we were absolutely certain that we
     do handle all valid cases, then we could just do nothing here.
     That seems optimistic, so attempt to do something logical...  */
  else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
	   && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
    {
      /* Make sure we know the object is addressable.  */
      pptr = &TREE_OPERAND (ptr, 0);
      add_stmt_operand (pptr, s_ann, 0);

      /* Mark the object itself with a VUSE.  */
      pptr = &TREE_OPERAND (*pptr, 0);
      get_expr_operands (stmt, pptr, flags);
      return;
    }

  /* Ok, this isn't even is_gimple_min_invariant.  Something's broken.  */
  else
    gcc_unreachable ();

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none);
}

/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or V_USE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
  if (aliases_computed_p
      && !bitmap_empty_p (call_clobbered_vars)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt);
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt);
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}

/* Add *VAR_P to the appropriate operand array for the statement with
   annotation S_ANN.  FLAGS is as in get_expr_operands.  If *VAR_P is a
   GIMPLE register, it will be added to the statement's real operands,
   otherwise it is added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  bool is_real_op;
  tree var, sym;
  var_ann_t v_ann;

  var = *var_p;
  STRIP_NOPS (var);

  /* If the operand is an ADDR_EXPR, add its operand to the list of
     variables that have had their address taken in this statement.  */
  if (TREE_CODE (var) == ADDR_EXPR)
    {
      note_addressable (TREE_OPERAND (var, 0), s_ann);
      return;
    }

  /* If the original variable is not a scalar, it will be added to the list
     of virtual operands.  In that case, use its base symbol as the virtual
     variable representing it.  */
  is_real_op = is_gimple_reg (var);
  if (!is_real_op && !DECL_P (var))
    var = get_virtual_var (var);

  /* If VAR is not a variable that we care to optimize, do nothing.  */
  if (var == NULL_TREE || !SSA_VAR_P (var))
    return;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a V_MAY_DEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     check that this happens on stores, and not writes to GIMPLE
     registers.

     FIXME: The C++ FE is emitting assignments in the IL stream for
     read-only globals.  This is wrong, but for the time being disable
     this transformation on V_MUST_DEF operands (otherwise, we
     mis-optimize SPEC2000's eon).  */
  if ((flags & opf_is_def)
      && !(flags & opf_kill_def)
      && unmodifiable_var_p (var))
    {
      gcc_assert (!is_real_op);
      flags &= ~opf_is_def;
    }

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    {
      varray_type aliases;

      /* The variable is not a GIMPLE register.  Add it (or its aliases) to
	 virtual operands, unless the caller has specifically requested
	 not to add virtual operands (used when adding operands inside an
	 ADDR_EXPR expression).  */
      if (flags & opf_no_vops)
	return;

      aliases = v_ann->may_aliases;

      if (aliases == NULL)
	{
	  /* The variable is not aliased or it is an alias tag.  */
	  if (flags & opf_is_def)
	    {
	      if (flags & opf_kill_def)
		{
		  /* Only regular variables or struct fields may get a
		     V_MUST_DEF operand.  */
		  gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG
			      || v_ann->mem_tag_kind == STRUCT_FIELD);
		  /* V_MUST_DEF for non-aliased, non-GIMPLE register
		     variable definitions.  */
		  append_v_must_def (var);
		}
	      else
		{
		  /* Add a V_MAY_DEF for call-clobbered variables and
		     memory tags.  */
		  append_v_may_def (var);
		}
	    }
	  else
	    {
	      append_vuse (var);
	      if (s_ann && v_ann->is_alias_tag)
		s_ann->makes_aliased_loads = 1;
	    }
	}
      else
	{
	  size_t i;

	  /* The variable is aliased.  Add its aliases to the virtual
	     operands.  */
	  gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);

	  if (flags & opf_is_def)
	    {
	      bool added_may_defs_p = false;

	      /* If the variable is also an alias tag, add a virtual
		 operand for it, otherwise we will miss representing
		 references to the members of the variable's alias set.
		 This fixes the bug in gcc.c-torture/execute/20020503-1.c.  */
	      if (v_ann->is_alias_tag)
		{
		  added_may_defs_p = true;
		  append_v_may_def (var);
		}

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		{
		  /* While VAR may be modifiable, some of its aliases
		     may not be.  If that's the case, we don't really
		     need to add a V_MAY_DEF for them.  */
		  tree alias = VARRAY_TREE (aliases, i);

		  if (unmodifiable_var_p (alias))
		    append_vuse (alias);
		  else
		    {
		      append_v_may_def (alias);
		      added_may_defs_p = true;
		    }
		}

	      if (s_ann && added_may_defs_p)
		s_ann->makes_aliased_stores = 1;
	    }
	  else
	    {
	      /* Similarly, append a virtual use for VAR itself, when
		 it is an alias tag.  */
	      if (v_ann->is_alias_tag)
		append_vuse (var);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_vuse (VARRAY_TREE (aliases, i));

	      if (s_ann)
		s_ann->makes_aliased_loads = 1;
	    }
	}
    }
}

/* Record that VAR had its address taken in the statement with annotation
   S_ANN.  */

static void
note_addressable (tree var, stmt_ann_t s_ann)
{
  subvar_t svars;

  if (!s_ann)
    return;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.
     See PR 21407 and the ensuing mailing list discussion.  */

  var = get_base_address (var);
  if (var && SSA_VAR_P (var))
    {
      if (s_ann->addresses_taken == NULL)
	s_ann->addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
	  && (svars = get_subvars_for_var (var)))
	{
	  subvar_t sv;
	  for (sv = svars; sv; sv = sv->next)
	    bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid);
	}
      else
	bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
    }
}
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt)
{
  int i;
  unsigned u;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;

  /* Functions that are not const, pure, or noreturn may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_is_def);
      return;
    }

  /* If the cache is valid, copy its elements into the build vectors.  */
  if (ssa_call_clobbered_cache_valid)
    {
      /* Process the caches in reverse order so we are always inserting at
         the head of the list.  */
      for (i = VEC_length (tree, clobbered_vuses) - 1; i >= 0; i--)
        {
          t = VEC_index (tree, clobbered_vuses, i);
          gcc_assert (TREE_CODE (t) != SSA_NAME);
          var_ann (t)->in_vuse_list = 1;
          opbuild_append_virtual (&build_vuses, t);
        }
      for (i = VEC_length (tree, clobbered_v_may_defs) - 1; i >= 0; i--)
        {
          t = VEC_index (tree, clobbered_v_may_defs, i);
          gcc_assert (TREE_CODE (t) != SSA_NAME);
          var_ann (t)->in_v_may_def_list = 1;
          opbuild_append_virtual (&build_v_may_defs, t);
        }
      if (s_ann)
        {
          s_ann->makes_aliased_loads = clobbered_aliased_loads;
          s_ann->makes_aliased_stores = clobbered_aliased_stores;
        }
      return;
    }

  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a V_MAY_DEF operand for every call-clobbered variable;
     unmodifiable variables only get a VUSE.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      if (unmodifiable_var_p (var))
        add_stmt_operand (&var, &empty_ann, opf_none);
      else
        add_stmt_operand (&var, &empty_ann, opf_is_def);
    }

  clobbered_aliased_loads = empty_ann.makes_aliased_loads;
  clobbered_aliased_stores = empty_ann.makes_aliased_stores;

  /* Set the flags for the stmt's annotation.  */
  if (s_ann)
    {
      s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
      s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
    }

  /* Prepare empty cache vectors.  */
  VEC_truncate (tree, clobbered_vuses, 0);
  VEC_truncate (tree, clobbered_v_may_defs, 0);

  /* Now fill the clobbered cache with the values that have been found.  */
  for (i = opbuild_first (&build_vuses);
       i != OPBUILD_LAST;
       i = opbuild_next (&build_vuses, i))
    VEC_safe_push (tree, heap, clobbered_vuses,
                   opbuild_elem_virtual (&build_vuses, i));

  gcc_assert (opbuild_num_elems (&build_vuses)
              == VEC_length (tree, clobbered_vuses));

  for (i = opbuild_first (&build_v_may_defs);
       i != OPBUILD_LAST;
       i = opbuild_next (&build_v_may_defs, i))
    VEC_safe_push (tree, heap, clobbered_v_may_defs,
                   opbuild_elem_virtual (&build_v_may_defs, i));

  gcc_assert (opbuild_num_elems (&build_v_may_defs)
              == VEC_length (tree, clobbered_v_may_defs));

  ssa_call_clobbered_cache_valid = true;
}
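
/* As an illustration, if the statement is a call 'foo ()' and the set
   of call-clobbered variables is {a, b}, the code above gives the call
   a V_MAY_DEF for 'a' and for 'b' (or a single V_MAY_DEF of .GLOBAL_VAR
   when that variable was created instead).  The names 'foo', 'a' and
   'b' are illustrative.  */
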
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt)
{
  int i;
  unsigned u;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;

  /* If the function is not pure, it may reference memory.  Add a VUSE
     for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_none);
      return;
    }

  /* If the cache is valid, copy its elements into the build vector.  */
  if (ssa_ro_call_cache_valid)
    {
      /* Process the cache in reverse order so we are always inserting at
         the head of the list.  */
      for (i = VEC_length (tree, ro_call_vuses) - 1; i >= 0; i--)
        {
          t = VEC_index (tree, ro_call_vuses, i);
          gcc_assert (TREE_CODE (t) != SSA_NAME);
          var_ann (t)->in_vuse_list = 1;
          opbuild_append_virtual (&build_vuses, t);
        }
      if (s_ann)
        s_ann->makes_aliased_loads = ro_call_aliased_loads;
      return;
    }

  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      add_stmt_operand (&var, &empty_ann, opf_none);
    }

  ro_call_aliased_loads = empty_ann.makes_aliased_loads;
  if (s_ann)
    s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;

  /* Prepare an empty cache vector.  */
  VEC_truncate (tree, ro_call_vuses, 0);

  /* Now fill the read-only call cache with the values that have been
     found.  */
  for (i = opbuild_first (&build_vuses);
       i != OPBUILD_LAST;
       i = opbuild_next (&build_vuses, i))
    VEC_safe_push (tree, heap, ro_call_vuses,
                   opbuild_elem_virtual (&build_vuses, i));

  gcc_assert (opbuild_num_elems (&build_vuses)
              == VEC_length (tree, ro_call_vuses));

  ssa_ro_call_cache_valid = true;
}
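
/* By contrast with add_call_clobber_ops, a pure call only reads memory,
   so each call-clobbered variable gets a VUSE here and nothing gets a
   V_MAY_DEF.  */
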
/* Scan the immediate_use list for VAR, making sure it is linked properly.
   Return TRUE if there is a problem.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error;  /* Two roots, or a stray guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;
      /* Avoid infinite loops.  */
      if (count++ > 30000)
        goto error;
    }

  /* Verify the list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
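
/* The list being checked is a circular doubly-linked list whose root is
   the SSA_NAME's own node, the only node allowed a NULL use pointer:

        root <-> use_1 <-> use_2 <-> ... <-> use_n <-> root

   so a correctly linked list satisfies ptr->next->prev == ptr and
   ptr->prev->next == ptr at every node, which is what the two walks
   above verify.  */
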
/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (!is_gimple_reg (USE_FROM_PTR (use_p)))
        print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
      else
        print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }
  fprintf (file, "\n");
}
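
/* A pass that wants to visit each use itself, rather than dump it, can
   follow the same pattern; for example (a sketch, with 'name' standing
   in for some SSA_NAME and 'walk_stmt' a hypothetical callback):

     imm_use_iterator iter;
     use_operand_p use_p;

     FOR_EACH_IMM_USE_FAST (use_p, iter, name)
       walk_stmt (USE_STMT (use_p));  */
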
/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}

/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}

/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}
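
/* The debug_* variants above take no FILE argument so that they are
   convenient to invoke directly from a debugger session.  */
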
#include "gt-tree-ssa-operands.h"