/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"

#include "langhooks.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands.  Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
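
/* Illustrative sketch (not part of the original file): once the cache has
   been finalized, passes normally walk it with the iterators declared in
   tree-ssa-operands.h rather than touching the build vectors directly.
   A minimal consumer might look like the following.  */

#if 0
static void
example_dump_stmt_uses (tree stmt)
{
  tree op;
  ssa_op_iter iter;

  /* Visit each real USE operand recorded in STMT's operand cache
     and print it.  */
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
    print_generic_expr (stderr, op, 0);
}
#endif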
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def 	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def 	(1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 2)

/* Operand is a "non-specific" kill for call-clobbers and such.  This is used
   to distinguish "reset the world" events from explicit MODIFY_EXPRs.  */
#define opf_non_specific  (1 << 3)
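
/* Example (illustrative, mirroring parse_ssa_operands below): the LHS of a
   whole-variable assignment 'a = b' is scanned with
   (opf_is_def | opf_kill_def), since the store kills the previous value of
   'a'.  A partial store such as 'a.f = b' or 'a[i] = b' keeps opf_is_def
   but drops opf_kill_def, because part of 'a' may live through the
   statement.  */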
/* This structure maintains a sorted list of operands which is created by
   parse_ssa_operands.  */
struct opbuild_list_d GTY (())
{
  varray_type vars;	/* The VAR_DECLS tree.  */
  varray_type uid;	/* The sort value for virtual symbols.  */
  varray_type next;	/* The next index in the sorted list.  */
  int first;		/* First element in list.  */
  unsigned num;		/* Number of elements.  */
};

#define OPBUILD_LAST	 -1
/* Array for building all the def operands.  */
static GTY (()) struct opbuild_list_d build_defs;

/* Array for building all the use operands.  */
static GTY (()) struct opbuild_list_d build_uses;

/* Array for building all the v_may_def operands.  */
static GTY (()) struct opbuild_list_d build_v_may_defs;

/* Array for building all the vuse operands.  */
static GTY (()) struct opbuild_list_d build_vuses;

/* Array for building all the v_must_def operands.  */
static GTY (()) struct opbuild_list_d build_v_must_defs;

/* True if the operands for call clobbered vars are cached and valid.  */
bool ssa_call_clobbered_cache_valid;
bool ssa_ro_call_cache_valid;

/* These arrays are the cached operand vectors for call clobbered calls.  */
static VEC(tree,heap) *clobbered_v_may_defs;
static VEC(tree,heap) *clobbered_vuses;
static VEC(tree,heap) *ro_call_vuses;
static bool clobbered_aliased_loads;
static bool clobbered_aliased_stores;
static bool ro_call_aliased_loads;
static bool ops_active = false;

static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
static unsigned operand_memory_index;

static void note_addressable (tree, stmt_ann_t);
static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
static void append_v_may_def (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree);
static void add_call_read_ops (tree);
static void add_stmt_operand (tree *, stmt_ann_t, int);
static void build_ssa_operands (tree stmt);

static def_optype_p free_defs = NULL;
static use_optype_p free_uses = NULL;
static vuse_optype_p free_vuses = NULL;
static maydef_optype_p free_maydefs = NULL;
static mustdef_optype_p free_mustdefs = NULL;
/* Initialize a virtual operand build LIST called NAME with NUM elements.  */

static inline void
opbuild_initialize_virtual (struct opbuild_list_d *list, int num,
			    const char *name)
{
  list->first = OPBUILD_LAST;
  list->num = 0;
  VARRAY_TREE_INIT (list->vars, num, name);
  VARRAY_UINT_INIT (list->uid, num, "List UID");
  VARRAY_INT_INIT (list->next, num, "List NEXT");
}


/* Initialize a real operand build LIST called NAME with NUM elements.  */

static inline void
opbuild_initialize_real (struct opbuild_list_d *list, int num, const char *name)
{
  list->first = OPBUILD_LAST;
  list->num = 0;
  VARRAY_TREE_PTR_INIT (list->vars, num, name);
  VARRAY_INT_INIT (list->next, num, "List NEXT");
  /* The UID field is not needed since we sort based on the pointer value.  */
  list->uid = NULL;
}


/* Free memory used in virtual operand build object LIST.  */

static inline void
opbuild_free (struct opbuild_list_d *list)
{
  list->vars = NULL;
  list->uid = NULL;
  list->next = NULL;
}
/* Number of elements in an opbuild list.  */

static inline unsigned
opbuild_num_elems (struct opbuild_list_d *list)
{
  return list->num;
}


/* Add VAR to the real operand list LIST, keeping it sorted and avoiding
   duplicates.  The actual sort value is the tree pointer value.  */

static inline void
opbuild_append_real (struct opbuild_list_d *list, tree *var)
{
  int index;

#ifdef ENABLE_CHECKING
  /* Ensure the real operand doesn't exist already.  */
  for (index = list->first;
       index != OPBUILD_LAST;
       index = VARRAY_INT (list->next, index))
    gcc_assert (VARRAY_TREE_PTR (list->vars, index) != var);
#endif

  /* First item in the list.  */
  index = VARRAY_ACTIVE_SIZE (list->vars);
  if (index == 0)
    list->first = index;
  else
    VARRAY_INT (list->next, index - 1) = index;
  VARRAY_PUSH_INT (list->next, OPBUILD_LAST);
  VARRAY_PUSH_TREE_PTR (list->vars, var);
  list->num++;
}
/* Add VAR to the virtual operand list LIST, keeping it sorted and avoiding
   duplicates.  The actual sort value is the DECL UID of the base variable.  */

static inline void
opbuild_append_virtual (struct opbuild_list_d *list, tree var)
{
  int index, curr, last;
  unsigned int var_uid;

  if (TREE_CODE (var) != SSA_NAME)
    var_uid = DECL_UID (var);
  else
    var_uid = DECL_UID (SSA_NAME_VAR (var));

  index = VARRAY_ACTIVE_SIZE (list->vars);

  if (index == 0)
    {
      VARRAY_PUSH_TREE (list->vars, var);
      VARRAY_PUSH_UINT (list->uid, var_uid);
      VARRAY_PUSH_INT (list->next, OPBUILD_LAST);
      list->first = 0;
      list->num = 1;
      return;
    }

  last = OPBUILD_LAST;
  /* Find the correct spot in the sorted list.  */
  for (curr = list->first;
       curr != OPBUILD_LAST;
       last = curr, curr = VARRAY_INT (list->next, curr))
    if (VARRAY_UINT (list->uid, curr) > var_uid)
      break;

  if (last == OPBUILD_LAST)
    {
      /* First item in the list.  */
      VARRAY_PUSH_INT (list->next, list->first);
      list->first = index;
    }
  else
    {
      /* Don't enter duplicates at all.  */
      if (VARRAY_UINT (list->uid, last) == var_uid)
	return;

      VARRAY_PUSH_INT (list->next, VARRAY_INT (list->next, last));
      VARRAY_INT (list->next, last) = index;
    }
  VARRAY_PUSH_TREE (list->vars, var);
  VARRAY_PUSH_UINT (list->uid, var_uid);
  list->num++;
}
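
/* Illustrative walk-through (not in the original): appending variables with
   DECL UIDs 7, 3 and 9 pushes them onto the varrays in arrival order
   (indices 0, 1 and 2), while the 'next' chain is rethreaded so that
   traversal visits them sorted by UID:

       first = 1 (uid 3) -> 0 (uid 7) -> 2 (uid 9) -> OPBUILD_LAST

   A variable whose UID is already present is detected at its insertion
   point and silently dropped.  */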
/* Return the first element index in LIST.  OPBUILD_LAST means there are no
   more elements.  */

static inline int
opbuild_first (struct opbuild_list_d *list)
{
  if (list->num > 0)
    return list->first;
  else
    return OPBUILD_LAST;
}


/* Return the next element after PREV in LIST.  */

static inline int
opbuild_next (struct opbuild_list_d *list, int prev)
{
  return VARRAY_INT (list->next, prev);
}


/* Return the real element at index ELEM in LIST.  */

static inline tree *
opbuild_elem_real (struct opbuild_list_d *list, int elem)
{
  return VARRAY_TREE_PTR (list->vars, elem);
}


/* Return the virtual element at index ELEM in LIST.  */

static inline tree
opbuild_elem_virtual (struct opbuild_list_d *list, int elem)
{
  return VARRAY_TREE (list->vars, elem);
}


/* Return the virtual element uid at index ELEM in LIST.  */

static inline unsigned int
opbuild_elem_uid (struct opbuild_list_d *list, int elem)
{
  return VARRAY_UINT (list->uid, elem);
}


/* Reset an operand build list.  */

static inline void
opbuild_clear (struct opbuild_list_d *list)
{
  list->first = OPBUILD_LAST;
  VARRAY_POP_ALL (list->vars);
  VARRAY_POP_ALL (list->next);
  if (list->uid)
    VARRAY_POP_ALL (list->uid);
  list->num = 0;
}


/* Remove ELEM from LIST where PREV is the previous element.  Return the next
   element.  */

static inline int
opbuild_remove_elem (struct opbuild_list_d *list, int elem, int prev)
{
  int ret;
  if (prev != OPBUILD_LAST)
    {
      gcc_assert (VARRAY_INT (list->next, prev) == elem);
      ret = VARRAY_INT (list->next, prev) = VARRAY_INT (list->next, elem);
    }
  else
    {
      gcc_assert (list->first == elem);
      ret = list->first = VARRAY_INT (list->next, elem);
    }
  list->num--;
  return ret;
}
/* Return true if the ssa operands cache is active.  */

bool
ssa_operands_active (void)
{
  return ops_active;
}


/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  opbuild_initialize_real (&build_defs, 5, "build defs");
  opbuild_initialize_real (&build_uses, 10, "build uses");
  opbuild_initialize_virtual (&build_vuses, 25, "build_vuses");
  opbuild_initialize_virtual (&build_v_may_defs, 25, "build_v_may_defs");
  opbuild_initialize_virtual (&build_v_must_defs, 25, "build_v_must_defs");
  gcc_assert (operand_memory == NULL);
  operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
  ops_active = true;
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  opbuild_free (&build_defs);
  opbuild_free (&build_uses);
  opbuild_free (&build_v_must_defs);
  opbuild_free (&build_v_may_defs);
  opbuild_free (&build_vuses);
  free_defs = NULL;
  free_uses = NULL;
  free_vuses = NULL;
  free_maydefs = NULL;
  free_mustdefs = NULL;
  while ((ptr = operand_memory) != NULL)
    {
      operand_memory = operand_memory->next;
      ggc_free (ptr);
    }

  VEC_free (tree, heap, clobbered_v_may_defs);
  VEC_free (tree, heap, clobbered_vuses);
  VEC_free (tree, heap, ro_call_vuses);
  ops_active = false;
}
/* Return memory for operands of SIZE chunks.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;
  if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
    {
      struct ssa_operand_memory_d *ptr;
      ptr = ggc_alloc (sizeof (struct ssa_operand_memory_d));
      ptr->next = operand_memory;
      operand_memory = ptr;
      operand_memory_index = 0;
    }
  ptr = &(operand_memory->mem[operand_memory_index]);
  operand_memory_index += size;
  return ptr;
}
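
/* Illustrative note (not in the original): this is a simple bump allocator.
   Requests are carved sequentially out of the current ggc-allocated chunk,
   and a fresh chunk of SSA_OPERAND_MEMORY_SIZE bytes is chained onto
   operand_memory only when a request would overflow the current one.
   Operand structures are never returned here individually; they are
   recycled through the free_* lists instead.  */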
/* Make sure PTR is in the correct immediate use list.  Since uses are simply
   pointers into the stmt TREE, there is no way of telling if anyone has
   changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
   The contents are different, but the pointer is still the same.  This
   routine will check to make sure PTR is in the correct list, and if it isn't
   put it in the correct list.  We cannot simply check the previous node
   because all nodes in the same stmt might have been changed.  */

static inline void
correct_use_link (use_operand_p ptr, tree stmt)
{
  use_operand_p prev;
  tree root;

  /*  Fold_stmt () may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  prev = ptr->prev;
  if (prev)
    {
      bool stmt_mod = true;
      /* Find the first element which isn't a SAFE iterator, is in a different
	 stmt, and is not a modified stmt.  That node is in the correct list,
	 see if we are too.  */

      while (stmt_mod)
	{
	  while (prev->stmt == stmt || prev->stmt == NULL)
	    prev = prev->prev;
	  if (prev->use == NULL)
	    stmt_mod = false;
	  else
	    if ((stmt_mod = stmt_modified_p (prev->stmt)))
	      prev = prev->prev;
	}

      /* Get the ssa_name of the list the node is in.  */
      if (prev->use == NULL)
	root = prev->stmt;
      else
	root = *(prev->use);
      /* If it's the right list, simply return.  */
      if (root == *(ptr->use))
	return;
    }
  /* It is in the wrong list if we reach here.  */
  delink_imm_use (ptr);
  link_imm_use (ptr, *(ptr->use));
}
#define FINALIZE_OPBUILD build_defs
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_real (&build_defs, (I))
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_real (&build_defs, (I))
#define FINALIZE_FUNC finalize_ssa_def_ops
#define FINALIZE_ALLOC alloc_def
#define FINALIZE_FREE free_defs
#define FINALIZE_TYPE struct def_optype_d
#define FINALIZE_ELEM(PTR)	((PTR)->def_ptr)
#define FINALIZE_OPS DEF_OPS
#define FINALIZE_BASE(VAR) VAR
#define FINALIZE_BASE_TYPE tree *
#define FINALIZE_BASE_ZERO NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)	FINALIZE_ELEM (PTR) = (VAL)
#include "tree-ssa-opfinalize.h"


/* This routine will create stmt operands for STMT from the def build list.  */

static void
finalize_ssa_defs (tree stmt)
{
  unsigned int num = opbuild_num_elems (&build_defs);
  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);

  /* If there is an old list, often the new list is identical, or close, so
     find the elements at the beginning that are the same as the vector.  */

  finalize_ssa_def_ops (stmt);
  opbuild_clear (&build_defs);
}
#define FINALIZE_OPBUILD build_uses
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_real (&build_uses, (I))
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_real (&build_uses, (I))
#define FINALIZE_FUNC finalize_ssa_use_ops
#define FINALIZE_ALLOC alloc_use
#define FINALIZE_FREE free_uses
#define FINALIZE_TYPE struct use_optype_d
#define FINALIZE_ELEM(PTR)	((PTR)->use_ptr.use)
#define FINALIZE_OPS USE_OPS
#define FINALIZE_USE_PTR(PTR)	USE_OP_PTR (PTR)
#define FINALIZE_BASE(VAR) VAR
#define FINALIZE_BASE_TYPE tree *
#define FINALIZE_BASE_ZERO NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)			\
				(PTR)->use_ptr.use = (VAL);	\
				link_imm_use_stmt (&((PTR)->use_ptr), \
						   *(VAL), (STMT))
#include "tree-ssa-opfinalize.h"

/* Return a new use operand vector for STMT, comparing to OLD_OPS_P.  */

static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = opbuild_num_elems (&build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to get_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*(opbuild_elem_real (&build_uses, x)) != stmt);
  }
#endif
  finalize_ssa_use_ops (stmt);
  opbuild_clear (&build_uses);
}
/* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P.  */
#define FINALIZE_OPBUILD build_v_may_defs
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_virtual (&build_v_may_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_uid (&build_v_may_defs, (I))
#define FINALIZE_FUNC finalize_ssa_v_may_def_ops
#define FINALIZE_ALLOC alloc_maydef
#define FINALIZE_FREE free_maydefs
#define FINALIZE_TYPE struct maydef_optype_d
#define FINALIZE_ELEM(PTR)	MAYDEF_RESULT (PTR)
#define FINALIZE_OPS MAYDEF_OPS
#define FINALIZE_USE_PTR(PTR)	MAYDEF_OP_PTR (PTR)
#define FINALIZE_BASE_ZERO 0
#define FINALIZE_BASE(VAR)	((TREE_CODE (VAR) == SSA_NAME)		\
				 ? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR)))
#define FINALIZE_BASE_TYPE unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)			\
				(PTR)->def_var = (VAL);		\
				(PTR)->use_var = (VAL);		\
				(PTR)->use_ptr.use = &((PTR)->use_var); \
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"


static void
finalize_ssa_v_may_defs (tree stmt)
{
  finalize_ssa_v_may_def_ops (stmt);
}


/* Clear the in_list bits and empty the build array for v_may_defs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = opbuild_num_elems (&build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = opbuild_elem_virtual (&build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }
  opbuild_clear (&build_v_may_defs);
}
#define FINALIZE_OPBUILD build_vuses
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_virtual (&build_vuses, (I))
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_uid (&build_vuses, (I))
#define FINALIZE_FUNC finalize_ssa_vuse_ops
#define FINALIZE_ALLOC alloc_vuse
#define FINALIZE_FREE free_vuses
#define FINALIZE_TYPE struct vuse_optype_d
#define FINALIZE_ELEM(PTR)	VUSE_OP (PTR)
#define FINALIZE_OPS VUSE_OPS
#define FINALIZE_USE_PTR(PTR)	VUSE_OP_PTR (PTR)
#define FINALIZE_BASE_ZERO 0
#define FINALIZE_BASE(VAR)	((TREE_CODE (VAR) == SSA_NAME)		\
				 ? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR)))
#define FINALIZE_BASE_TYPE unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)			\
				(PTR)->use_var = (VAL);		\
				(PTR)->use_ptr.use = &((PTR)->use_var); \
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"


/* Return a new vuse operand vector, comparing to OLD_OPS_P.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_v_may_defs;
  int vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  num = opbuild_num_elems (&build_vuses);
  num_v_may_defs = opbuild_num_elems (&build_v_may_defs);

  if (num > 0 && num_v_may_defs > 0)
    {
      int last = OPBUILD_LAST;
      vuse_index = opbuild_first (&build_vuses);
      for ( ; vuse_index != OPBUILD_LAST; )
	{
	  tree vuse;
	  vuse = opbuild_elem_virtual (&build_vuses, vuse_index);
	  if (TREE_CODE (vuse) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (vuse);
	      ann->in_vuse_list = 0;
	      if (ann->in_v_may_def_list)
		{
		  vuse_index = opbuild_remove_elem (&build_vuses, vuse_index,
						    last);
		  continue;
		}
	    }
	  last = vuse_index;
	  vuse_index = opbuild_next (&build_vuses, vuse_index);
	}
    }
  else
    /* Clear out the in_list bits.  */
    for (vuse_index = opbuild_first (&build_vuses);
	 vuse_index != OPBUILD_LAST;
	 vuse_index = opbuild_next (&build_vuses, vuse_index))
      {
	tree t = opbuild_elem_virtual (&build_vuses, vuse_index);
	if (TREE_CODE (t) != SSA_NAME)
	  {
	    var_ann_t ann = var_ann (t);
	    ann->in_vuse_list = 0;
	  }
      }

  finalize_ssa_vuse_ops (stmt);
  /* The v_may_def build vector wasn't cleaned up because we needed it.  */
  cleanup_v_may_defs ();

  /* Free the vuses build vector.  */
  opbuild_clear (&build_vuses);
}
/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P.  */

#define FINALIZE_OPBUILD build_v_must_defs
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_virtual (&build_v_must_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_uid (&build_v_must_defs, (I))
#define FINALIZE_FUNC finalize_ssa_v_must_def_ops
#define FINALIZE_ALLOC alloc_mustdef
#define FINALIZE_FREE free_mustdefs
#define FINALIZE_TYPE struct mustdef_optype_d
#define FINALIZE_ELEM(PTR)	MUSTDEF_RESULT (PTR)
#define FINALIZE_OPS MUSTDEF_OPS
#define FINALIZE_USE_PTR(PTR)	MUSTDEF_KILL_PTR (PTR)
#define FINALIZE_BASE_ZERO 0
#define FINALIZE_BASE(VAR)	((TREE_CODE (VAR) == SSA_NAME)		\
				 ? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR)))
#define FINALIZE_BASE_TYPE unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)			\
				(PTR)->def_var = (VAL);		\
				(PTR)->kill_var = (VAL);	\
				(PTR)->use_ptr.use = &((PTR)->kill_var); \
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"


static void
finalize_ssa_v_must_defs (tree stmt)
{
  /* In the presence of subvars, there may be more than one V_MUST_DEF per
     statement (one for each subvar).  It is a bit expensive to verify that
     all must-defs in a statement belong to subvars if there is more than one
     MUST-def, so we don't do it.  Suffice to say, if you reach here without
     having subvars, and have num > 1, you have hit a bug.  */

  finalize_ssa_v_must_def_ops (stmt);
  opbuild_clear (&build_v_must_defs);
}
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);
}


/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (opbuild_num_elems (&build_defs) == 0);
  gcc_assert (opbuild_num_elems (&build_uses) == 0);
  gcc_assert (opbuild_num_elems (&build_vuses) == 0);
  gcc_assert (opbuild_num_elems (&build_v_may_defs) == 0);
  gcc_assert (opbuild_num_elems (&build_v_must_defs) == 0);
}


/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  opbuild_append_real (&build_defs, def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  opbuild_append_real (&build_uses, use_p);
}
/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      /* Don't allow duplicate entries.  */
      if (ann->in_v_may_def_list)
	return;
      ann->in_v_may_def_list = 1;
    }

  opbuild_append_virtual (&build_v_may_defs, var);
}


/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
	return;
      ann->in_vuse_list = 1;
    }

  opbuild_append_virtual (&build_vuses, var);
}


/* Add VAR to the list of virtual must definitions for INFO.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < opbuild_num_elems (&build_v_must_defs); i++)
    if (var == opbuild_elem_virtual (&build_v_must_defs, i))
      return;

  opbuild_append_virtual (&build_v_must_defs, var);
}
/* Parse STMT looking for operands.  OLD_OPS is the original stmt operand
   cache for STMT, if it existed before.  When finished, the various build_*
   operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      /* First get operands from the RHS.  For the LHS, we use a V_MAY_DEF if
	 either only part of LHS is modified or if the RHS might throw,
	 otherwise, use V_MUST_DEF.

	 ??? If it might throw, we should represent somehow that it is killed
	 on the fallthrough path.  */
      {
	tree lhs = TREE_OPERAND (stmt, 0);
	int lhs_flags = opf_is_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);

	/* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
	   or not the entire LHS is modified; that depends on what's
	   inside the VIEW_CONVERT_EXPR.  */
	if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
	  lhs = TREE_OPERAND (lhs, 0);

	if (TREE_CODE (lhs) != ARRAY_REF && TREE_CODE (lhs) != ARRAY_RANGE_REF
	    && TREE_CODE (lhs) != BIT_FIELD_REF
	    && TREE_CODE (lhs) != REALPART_EXPR
	    && TREE_CODE (lhs) != IMAGPART_EXPR)
	  lhs_flags |= opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
      }
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

      /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
	 pointer (which may only happen for USE operands), we will fail in
	 append_use.  This default will handle statements like empty
	 statements, or CALL_EXPRs that may appear on the RHS of a statement
	 or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }
}
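
/* Worked example (illustrative): for the GIMPLE statement 'a = b + c' with
   scalar, non-aliased operands, parsing appends 'a' to build_defs with
   (opf_is_def | opf_kill_def) and appends 'b' and 'c' to build_uses.  Had
   'a' been aliased or not a GIMPLE register, the LHS would instead have
   landed in build_v_may_defs or build_v_must_defs via add_stmt_operand.  */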
/* Create an operands cache for STMT, returning it in NEW_OPS.  OLD_OPS are
   the original operands, and if ANN is non-null, appropriate stmt flags are
   set in the stmt's annotation.  If ANN is NULL, this is not considered a
   "real" stmt, and none of the operands will be entered into their respective
   immediate uses tables.  This is to allow stmts to be processed when they
   are not actually in the CFG.

   Note that some fields in old_ops may change to NULL, although none of the
   memory they originally pointed to will be destroyed.  It is appropriate
   to call free_stmt_operands() on the value returned in old_ops.

   The rationale for this: Certain optimizations wish to examine the
   difference between new_ops and old_ops after processing.  If a set of
   operands don't change, new_ops will simply assume the pointer in old_ops,
   and the old_ops pointer will be set to NULL, indicating no memory needs to
   be cleared.  Usage might appear something like:

       old_ops_copy = old_ops = stmt_ann(stmt)->operands;
       build_ssa_operands (stmt, NULL, &old_ops, &new_ops);
       <* compare old_ops_copy and new_ops *>
       free_ssa_operands (old_ops);				*/

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands, nor
     makes aliased loads or stores.  */
  if (ann)
    {
      ann->has_volatile_ops = false;
      ann->makes_aliased_stores = false;
      ann->makes_aliased_loads = false;
    }

  start_ssa_stmt_operands ();

  parse_ssa_operands (stmt);

  finalize_ssa_stmt_operands (stmt);
}


/* Free any operands vectors in OPS.  */
#if 0
static void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->maydef_ops = NULL;
  ops->mustdef_ops = NULL;
  ops->vuse_ops = NULL;
  while (ops->memory.next != NULL)
    {
      operand_memory_p tmp = ops->memory.next;
      ops->memory.next = tmp->next;
      ggc_free (tmp);
    }
}
#endif
/* Get the operands of statement STMT.  Note that repeated calls to
   get_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to mark_stmt_modified().  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);
  /* If get_stmt_operands is called before SSA is initialized, don't
     do anything.  */
  if (!ssa_operands_active ())
    return;
  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  gcc_assert (ann->modified);

  timevar_push (TV_TREE_OPS);

  build_ssa_operands (stmt);

  /* Clear the modified bit for STMT.  Subsequent calls to
     get_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to mark_stmt_modified().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
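
/* Usage sketch (not part of the original file): a pass that edits a
   statement in place is expected to flag the statement as modified and then
   rebuild its operand cache, roughly as follows (assuming
   mark_stmt_modified() from tree-flow-inline.h).  */

#if 0
static void
example_replace_rhs (tree stmt, tree new_rhs)
{
  /* Rewrite the RHS of a MODIFY_EXPR, then refresh the operand cache.  */
  TREE_OPERAND (stmt, 1) = new_rhs;
  mark_stmt_modified (stmt);
  update_stmt_operands (stmt);
}
#endif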
/* Copies virtual operands from SRC to DST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  tree t;
  ssa_op_iter iter, old_iter;
  use_operand_p use_p, u2;
  def_operand_p def_p, d2;

  build_ssa_operands (dest);

  /* Copy all the virtual fields.  */
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
    append_vuse (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
    append_v_may_def (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
    append_v_must_def (t);

  if (opbuild_num_elems (&build_vuses) == 0
      && opbuild_num_elems (&build_v_may_defs) == 0
      && opbuild_num_elems (&build_v_must_defs) == 0)
    return;

  /* Now commit the virtual operands to this stmt.  */
  finalize_ssa_v_must_defs (dest);
  finalize_ssa_v_may_defs (dest);
  finalize_ssa_vuses (dest);

  /* Finally, set the fields to the same values as the originals.  */
  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, t);
      t = op_iter_next_tree (&old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_maydef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));
}
/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this
   can be used to eliminate redundant loads.  OLD_OPS are the operands from
   the store stmt, and NEW_STMT is the new load which represents a load of
   the values stored.  */

void
create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
{
  stmt_ann_t ann;
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned x;

  ann = get_stmt_ann (new_stmt);

  /* Process the stmt looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (x = 0; x < opbuild_num_elems (&build_vuses); x++)
    {
      tree t = opbuild_elem_virtual (&build_vuses, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_vuse_list = 0;
	}
    }

  for (x = 0; x < opbuild_num_elems (&build_v_may_defs); x++)
    {
      tree t = opbuild_elem_virtual (&build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }
  /* Remove any virtual operands that were found.  */
  opbuild_clear (&build_v_may_defs);
  opbuild_clear (&build_v_must_defs);
  opbuild_clear (&build_vuses);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
			     (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
    append_vuse (op);

  /* Now build the operands for this new stmt.  */
  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}
static void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;
      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }
      /* If both uses don't have operand entries, there isn't much we can do
	 at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
	{
	  tree *tmp = USE_OP_PTR (use1)->use;
	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
	  USE_OP_PTR (use0)->use = tmp;
	}
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
/* Recursively scan the expression pointed to by EXPR_P in statement referred
   to by INFO.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* We could have the address of a component, array member,
	 etc which has interesting variable references.  */
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the stmt takes its address will be
	 of interest to some passes (e.g. alias resolution).  */
      add_stmt_operand (expr_p, s_ann, 0);

      /* If the address is invariant, there may be no interesting variable
	 references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* There should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find here
	 are ARRAY_REF indices which will always be real operands (GIMPLE
	 does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case CONST_DECL:
      {
	subvar_t svars;

	/* Add the subvars for a variable if it has subvars, to DEFS or USES.
	   Otherwise, add the variable itself.
	   Whether it goes to USES or DEFS depends on the operand flags.  */
	if (var_can_have_subvars (expr)
	    && (svars = get_subvars_for_var (expr)))
	  {
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      add_stmt_operand (&sv->var, s_ann, flags);
	  }
	else
	  {
	    add_stmt_operand (expr_p, s_ann, flags);
	  }
	return;
      }

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
	 representing the array.  The virtual variable for an ARRAY_REF
	 is the VAR_DECL for the array.  */

      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
	 according to the value of IS_DEF.  Recurse if the LHS of the
	 ARRAY_REF node is not a regular variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, s_ann, flags);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
      return;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree ref;
	HOST_WIDE_INT offset, size;
	/* This component ref becomes an access to all of the subvariables
	   it can touch, if we can determine that, but *NOT* the real one.
	   If we can't determine which fields we could touch, the recursion
	   will eventually get to a variable and add *all* of its subvars, or
	   whatever is the minimum correct subset.  */

	ref = okay_component_ref_for_subvars (expr, &offset, &size);
	if (ref)
	  {
	    subvar_t svars = get_subvars_for_var (ref);
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      {
		bool exact;
		if (overlap_subvar (offset, size, sv, &exact))
		  {
		    if (!exact)
		      flags &= ~opf_kill_def;
		    add_stmt_operand (&sv->var, s_ann, flags);
		  }
	      }
	  }
	else
	  get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			     flags & ~opf_kill_def);

	if (code == COMPONENT_REF)
	  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case MODIFY_EXPR:
      {
	int subflags;
	tree op;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

	op = TREE_OPERAND (expr, 0);
	if (TREE_CODE (op) == WITH_SIZE_EXPR)
	  op = TREE_OPERAND (op, 0);
	if (TREE_CODE (op) == ARRAY_REF
	    || TREE_CODE (op) == ARRAY_RANGE_REF
	    || TREE_CODE (op) == REALPART_EXPR
	    || TREE_CODE (op) == IMAGPART_EXPR)
	  subflags = opf_is_def;
	else
	  subflags = opf_is_def | opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
	return;
      }

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */

	tree t;
	for (t = TREE_OPERAND (expr, 0); t ; t = TREE_CHAIN (t))
	  get_expr_operands (stmt, &TREE_VALUE (t), opf_none);

	return;
      }

    case TRUTH_NOT_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	tree op0 = TREE_OPERAND (expr, 0);
	tree op1 = TREE_OPERAND (expr, 1);

	/* If it would be profitable to swap the operands, then do so to
	   canonicalize the statement, enabling better optimization.

	   By placing canonicalization of such expressions here we
	   transparently keep statements in canonical form, even
	   when the statement is modified.  */
	if (tree_swap_operands_p (op0, op1, false))
	  {
	    /* For relationals we need to swap the operands
	       and change the code.  */
	    if (code == LT_EXPR
		|| code == GT_EXPR
		|| code == LE_EXPR
		|| code == GE_EXPR)
	      {
		TREE_SET_CODE (expr, swap_tree_comparison (code));
		swap_tree_operands (stmt,
				    &TREE_OPERAND (expr, 0),
				    &TREE_OPERAND (expr, 1));
	      }

	    /* For a commutative operator we can just swap the operands.  */
	    else if (commutative_tree_code (code))
	      {
		swap_tree_operands (stmt,
				    &TREE_OPERAND (expr, 0),
				    &TREE_OPERAND (expr, 1));
	      }
	  }

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
	goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
	goto do_binary;
      if (class == tcc_constant || class == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  internal_error ("internal error");
#endif
  gcc_unreachable ();
}
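
/* Worked example (illustrative): given '5 > a' in a COND_EXPR, the
   do_binary canonicalization above asks tree_swap_operands_p, rewrites the
   code to LT_EXPR via swap_tree_comparison, and exchanges the operand slots
   with swap_tree_operands, yielding 'a < 5' while preserving each use's
   position in its immediate-use list.  */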
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
			       &allows_mem, &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }


  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, s_ann, opf_is_def);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	    }

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
	  {
	    tree var = referenced_var (i);

	    /* Subvars are explicitly represented in this list, so
	       we don't need the original to be added to the clobber
	       ops, but the original *will* be in this list because
	       we keep the addressability of the original
	       variable up-to-date so we don't screw up the rest of
	       the backend.  */
	    if (var_can_have_subvars (var)
		&& get_subvars_for_var (var) != NULL)
	      continue;

	    add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	  }

	break;
      }
}
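
/* Worked example (illustrative): for 'asm volatile ("" : : : "memory")',
   the clobber loop above adds a non-specific V_MAY_DEF
   (opf_is_def | opf_non_specific) for every call-clobbered and addressable
   variable, so optimizers treat the asm as a barrier that may modify any of
   that memory.  */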
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its type memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);
	  if (v_ann->type_mem_tag)
	    add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
	}
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }

  /* Everything else *should* have been folded elsewhere, but users
     are smarter than we in finding ways to write invalid code.  We
     cannot just assert here.  If we were absolutely certain that we
     do handle all valid cases, then we could just do nothing here.
     That seems optimistic, so attempt to do something logical...  */
  else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
	   && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
    {
      /* Make sure we know the object is addressable.  */
      pptr = &TREE_OPERAND (ptr, 0);
      add_stmt_operand (pptr, s_ann, 0);

      /* Mark the object itself with a VUSE.  */
      pptr = &TREE_OPERAND (*pptr, 0);
      get_expr_operands (stmt, pptr, flags);
      return;
    }

  /* Ok, this isn't even is_gimple_min_invariant.  Something's broken.  */
  else
    gcc_unreachable ();

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none);
}
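
/* Worked example (illustrative): for a dereference '*p_1' where p_1 is an
   SSA pointer with flow-sensitive points-to information, the virtual
   operand added above is p_1's name memory tag; without that information,
   the type memory tag of the base variable 'p' is used instead.  */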
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or V_USE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
  if (aliases_computed_p
      && !bitmap_empty_p (call_clobbered_vars)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt);
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt);
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}
/* Add *VAR_P to the appropriate operand array for INFO.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  bool is_real_op;
  tree var, sym;
  var_ann_t v_ann;

  var = *var_p;
  STRIP_NOPS (var);

  /* If the operand is an ADDR_EXPR, add its operand to the list of
     variables that have had their address taken in this statement.  */
  if (TREE_CODE (var) == ADDR_EXPR)
    {
      note_addressable (TREE_OPERAND (var, 0), s_ann);
      return;
    }

  /* If the original variable is not a scalar, it will be added to the list
     of virtual operands.  In that case, use its base symbol as the virtual
     variable representing it.  */
  is_real_op = is_gimple_reg (var);
  if (!is_real_op && !DECL_P (var))
    var = get_virtual_var (var);

  /* If VAR is not a variable that we care to optimize, do nothing.  */
  if (var == NULL_TREE || !SSA_VAR_P (var))
    return;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a V_MAY_DEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     check that this only happens on non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     modify_expr, then we can't suppress the V_DEF, lest we run into
     validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_non_specific) && unmodifiable_var_p (var))
    {
      gcc_assert (!is_real_op);
      flags &= ~(opf_is_def | opf_kill_def);
    }

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    {
      varray_type aliases;

      /* The variable is not a GIMPLE register.  Add it (or its aliases) to
	 virtual operands, unless the caller has specifically requested
	 not to add virtual operands (used when adding operands inside an
	 ADDR_EXPR expression).  */
      if (flags & opf_no_vops)
	return;

      aliases = v_ann->may_aliases;

      if (aliases == NULL)
	{
	  /* The variable is not aliased or it is an alias tag.  */
	  if (flags & opf_is_def)
	    {
	      if (flags & opf_kill_def)
		{
		  /* Only regular variables or struct fields may get a
		     V_MUST_DEF operand.  */
		  gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG
			      || v_ann->mem_tag_kind == STRUCT_FIELD);
		  /* V_MUST_DEF for non-aliased, non-GIMPLE register
		     variable definitions.  */
		  append_v_must_def (var);
		}
	      else
		{
		  /* Add a V_MAY_DEF for call-clobbered variables and
		     memory tags.  */
		  append_v_may_def (var);
		}
	    }
	  else
	    {
	      append_vuse (var);
	      if (s_ann && v_ann->is_alias_tag)
		s_ann->makes_aliased_loads = 1;
	    }
	}
      else
	{
	  size_t i;

	  /* The variable is aliased.  Add its aliases to the virtual
	     operands.  */
	  gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);

	  if (flags & opf_is_def)
	    {
	      bool added_may_defs_p = false;

	      /* If the variable is also an alias tag, add a virtual
		 operand for it, otherwise we will miss representing
		 references to the members of the variable's alias set.
		 This fixes the bug in gcc.c-torture/execute/20020503-1.c.  */
	      if (v_ann->is_alias_tag)
		{
		  added_may_defs_p = true;
		  append_v_may_def (var);
		}

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		{
		  /* While VAR may be modifiable, some of its aliases
		     may not be.  If that's the case, we don't really
		     need to add a V_MAY_DEF for them.  */
		  tree alias = VARRAY_TREE (aliases, i);

		  if (unmodifiable_var_p (alias))
		    append_vuse (alias);
		  else
		    {
		      append_v_may_def (alias);
		      added_may_defs_p = true;
		    }
		}

	      if (s_ann && added_may_defs_p)
		s_ann->makes_aliased_stores = 1;
	    }
	  else
	    {
	      /* Similarly, append a virtual use for VAR itself, when
		 it is an alias tag.  */
	      if (v_ann->is_alias_tag)
		append_vuse (var);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_vuse (VARRAY_TREE (aliases, i));

	      if (s_ann)
		s_ann->makes_aliased_loads = 1;
	    }
	}
    }
}
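
/* Worked example (illustrative): if 'x' has may_aliases = { MT.a, MT.b },
   a store to 'x' appends V_MAY_DEFs for MT.a and MT.b (and for 'x' itself
   when it is an alias tag) and sets makes_aliased_stores; a load appends
   the corresponding VUSEs and sets makes_aliased_loads.  */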
/* Record that VAR had its address taken in the statement with annotations
   S_ANN.  */

static void
note_addressable (tree var, stmt_ann_t s_ann)
{
  subvar_t svars;

  if (!s_ann)
    return;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.
     See PR 21407 and the ensuing mailing list discussion.  */

  var = get_base_address (var);
  if (var && SSA_VAR_P (var))
    {
      if (s_ann->addresses_taken == NULL)
	s_ann->addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
	  && (svars = get_subvars_for_var (var)))
	{
	  subvar_t sv;
	  for (sv = svars; sv; sv = sv->next)
	    bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid);
	}
      else
	bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
    }
}
1922 /* Add clobbering definitions for .GLOBAL_VAR or for each of the
1923 call-clobbered variables in the function. */
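/* For instance (a sketch in dump notation, with hypothetical names), a
   call to 'foo' in a function whose call-clobbered set is {a, b} would
   be annotated as

	# a = V_MAY_DEF <a>
	# b = V_MAY_DEF <b>
	foo ();

   or with a single V_MAY_DEF of .GLOBAL_VAR when that variable exists.  */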
1925 static void
1926 add_call_clobber_ops (tree stmt)
1928 int i;
1929 unsigned u;
1930 tree t;
1931 bitmap_iterator bi;
1932 stmt_ann_t s_ann = stmt_ann (stmt);
1933 struct stmt_ann_d empty_ann;
1935 /* Functions that are not const, pure, or noreturn may clobber
1936 call-clobbered variables. */
1937 if (s_ann)
1938 s_ann->makes_clobbering_call = true;
1940 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1941 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1942 if (global_var)
1944 add_stmt_operand (&global_var, s_ann, opf_is_def);
1945 return;
1948 /* If cache is valid, copy the elements into the build vectors. */
1949 if (ssa_call_clobbered_cache_valid)
1951 /* Process the caches in reverse order so we are always inserting at
1952 the head of the list. */
1953 for (i = VEC_length (tree, clobbered_vuses) - 1; i >= 0; i--)
1955 t = VEC_index (tree, clobbered_vuses, i);
1956 gcc_assert (TREE_CODE (t) != SSA_NAME);
1957 var_ann (t)->in_vuse_list = 1;
1958 opbuild_append_virtual (&build_vuses, t);
1960 for (i = VEC_length (tree, clobbered_v_may_defs) - 1; i >= 0; i--)
1962 t = VEC_index (tree, clobbered_v_may_defs, i);
1963 gcc_assert (TREE_CODE (t) != SSA_NAME);
1964 var_ann (t)->in_v_may_def_list = 1;
1965 opbuild_append_virtual (&build_v_may_defs, t);
1967 if (s_ann)
1969 s_ann->makes_aliased_loads = clobbered_aliased_loads;
1970 s_ann->makes_aliased_stores = clobbered_aliased_stores;
1972 return;
1975 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
1977 /* Add a V_MAY_DEF operand for every call-clobbered variable. */
1978 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1980 tree var = referenced_var (u);
1981 if (unmodifiable_var_p (var))
1982 add_stmt_operand (&var, &empty_ann, opf_none);
1983 else
1984 add_stmt_operand (&var, &empty_ann, opf_is_def | opf_non_specific);
1987 clobbered_aliased_loads = empty_ann.makes_aliased_loads;
1988 clobbered_aliased_stores = empty_ann.makes_aliased_stores;
1990 /* Set the flags for a stmt's annotation. */
1991 if (s_ann)
1993 s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
1994 s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
1997 /* Prepare empty cache vectors. */
1998 VEC_truncate (tree, clobbered_vuses, 0);
1999 VEC_truncate (tree, clobbered_v_may_defs, 0);
2001 /* Now fill the clobbered cache with the values that have been found. */
2002 for (i = opbuild_first (&build_vuses);
2003 i != OPBUILD_LAST;
2004 i = opbuild_next (&build_vuses, i))
2005 VEC_safe_push (tree, heap, clobbered_vuses,
2006 opbuild_elem_virtual (&build_vuses, i));
2008 gcc_assert (opbuild_num_elems (&build_vuses)
2009 == VEC_length (tree, clobbered_vuses));
2011 for (i = opbuild_first (&build_v_may_defs);
2012 i != OPBUILD_LAST;
2013 i = opbuild_next (&build_v_may_defs, i))
2014 VEC_safe_push (tree, heap, clobbered_v_may_defs,
2015 opbuild_elem_virtual (&build_v_may_defs, i));
2017 gcc_assert (opbuild_num_elems (&build_v_may_defs)
2018 == VEC_length (tree, clobbered_v_may_defs));
2020 ssa_call_clobbered_cache_valid = true;
2024 /* Add VUSE operands for .GLOBAL_VAR or all call-clobbered variables in the
2025 function. */
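/* For instance (again a sketch in dump notation), a call to a pure
   function 'bar' only receives uses of the call-clobbered set:

	# VUSE <a>
	# VUSE <b>
	x = bar ();

   since a pure function may read, but not modify, global memory.  */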
2027 static void
2028 add_call_read_ops (tree stmt)
2030 int i;
2031 unsigned u;
2032 tree t;
2033 bitmap_iterator bi;
2034 stmt_ann_t s_ann = stmt_ann (stmt);
2035 struct stmt_ann_d empty_ann;
2037 /* If the function is not pure, it may reference memory. Add
2038 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
2039 for the heuristic used to decide whether to create .GLOBAL_VAR. */
2040 if (global_var)
2042 add_stmt_operand (&global_var, s_ann, opf_none);
2043 return;
2046 /* If cache is valid, copy the elements into the build vector. */
2047 if (ssa_ro_call_cache_valid)
2049 for (i = VEC_length (tree, ro_call_vuses) - 1; i >= 0; i--)
2051 /* Process the cache in reverse order so we are always inserting at
2052 the head of the list. */
2053 t = VEC_index (tree, ro_call_vuses, i);
2054 gcc_assert (TREE_CODE (t) != SSA_NAME);
2055 var_ann (t)->in_vuse_list = 1;
2056 opbuild_append_virtual (&build_vuses, t);
2058 if (s_ann)
2059 s_ann->makes_aliased_loads = ro_call_aliased_loads;
2060 return;
2063 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
2065 /* Add a VUSE for each call-clobbered variable. */
2066 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
2068 tree var = referenced_var (u);
2069 add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
2072 ro_call_aliased_loads = empty_ann.makes_aliased_loads;
2073 if (s_ann)
2074 s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
2076 /* Prepare an empty cache vector. */
2077 VEC_truncate (tree, ro_call_vuses, 0);
2079 /* Now fill the read-only call cache with the values that have been found. */
2080 for (i = opbuild_first (&build_vuses);
2081 i != OPBUILD_LAST;
2082 i = opbuild_next (&build_vuses, i))
2083 VEC_safe_push (tree, heap, ro_call_vuses,
2084 opbuild_elem_virtual (&build_vuses, i));
2086 gcc_assert (opbuild_num_elems (&build_vuses)
2087 == VEC_length (tree, ro_call_vuses));
2089 ssa_ro_call_cache_valid = true;
2093 /* Scan the immediate_use list for VAR, making sure it is linked properly.
2094 Return TRUE if there is a problem. */
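/* The immediate use list is a circular doubly-linked list rooted at a
   guard node stored in the SSA_NAME itself; the guard is recognizable
   by its NULL 'use' pointer. Schematically:

	root <-> use_1 <-> use_2 <-> ... <-> use_n <-> root

   Both directions are walked below and their node counts compared.  */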
2096 bool
2097 verify_imm_links (FILE *f, tree var)
2099 use_operand_p ptr, prev, list;
2100 int count;
2102 gcc_assert (TREE_CODE (var) == SSA_NAME);
2104 list = &(SSA_NAME_IMM_USE_NODE (var));
2105 gcc_assert (list->use == NULL);
2107 if (list->prev == NULL)
2109 gcc_assert (list->next == NULL);
2110 return false;
2113 prev = list;
2114 count = 0;
2115 for (ptr = list->next; ptr != list; )
2117 if (prev != ptr->prev)
2118 goto error;
2120 if (ptr->use == NULL)
2121 goto error; /* Two roots, or a stray guard node. */
2122 else if (*(ptr->use) != var)
2123 goto error;
2125 prev = ptr;
2126 ptr = ptr->next;
2127 /* Avoid infinite loops. */
2128 if (count++ > 30000)
2129 goto error;
2132 /* Verify list in the other direction. */
2133 prev = list;
2134 for (ptr = list->prev; ptr != list; )
2136 if (prev != ptr->next)
2137 goto error;
2138 prev = ptr;
2139 ptr = ptr->prev;
2140 if (count-- < 0)
2141 goto error;
2144 if (count != 0)
2145 goto error;
2147 return false;
2149 error:
2150 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2152 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2153 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2155 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2156 (void *)ptr->use);
2157 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2158 fprintf (f, "\n");
2159 return true;
2163 /* Dump all the immediate uses of VAR to FILE. */
2165 void
2166 dump_immediate_uses_for (FILE *file, tree var)
2168 imm_use_iterator iter;
2169 use_operand_p use_p;
2171 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2173 print_generic_expr (file, var, TDF_SLIM);
2174 fprintf (file, " : -->");
2175 if (has_zero_uses (var))
2176 fprintf (file, " no uses.\n");
2177 else
2178 if (has_single_use (var))
2179 fprintf (file, " single use.\n");
2180 else
2181 fprintf (file, "%d uses.\n", num_imm_uses (var));
2183 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2185 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2186 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2187 else
2188 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2190 fprintf (file, "\n");
2193 /* Dump all the immediate uses to FILE. */
2195 void
2196 dump_immediate_uses (FILE *file)
2198 tree var;
2199 unsigned int x;
2201 fprintf (file, "Immediate_uses:\n\n");
2202 for (x = 1; x < num_ssa_names; x++)
2204 var = ssa_name (x);
2205 if (!var)
2206 continue;
2207 dump_immediate_uses_for (file, var);
2212 /* Dump def-use edges on stderr. */
2214 void
2215 debug_immediate_uses (void)
2217 dump_immediate_uses (stderr);
2220 /* Dump def-use edges for VAR on stderr. */
2222 void
2223 debug_immediate_uses_for (tree var)
2225 dump_immediate_uses_for (stderr, var);
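/* The debug_* entry points above exist mainly for interactive use from
   a debugger, e.g. (a sketch; ssa_name is a macro, so its expanded
   form may be needed in practice):

	(gdb) call debug_immediate_uses ()
	(gdb) call debug_immediate_uses_for (some_ssa_name)

   where 'some_ssa_name' is any SSA_NAME tree of interest.  */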
2227 #include "gt-tree-ssa-operands.h"