/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"

#include "langhooks.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
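
/* For instance, a pass that has just rewritten part of a statement might
   refresh the cache and then walk the real uses with something like the
   following (a minimal sketch; process_use is a hypothetical routine
   belonging to the pass, not part of this file):

     tree use;
     ssa_op_iter iter;

     mark_stmt_modified (stmt);
     update_stmt_operands (stmt);
     FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
       process_use (use);
 */
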
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def 	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def 	(1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 2)

/* Operand is a "non-specific" kill for call-clobbers and such.  This is used
   to distinguish "reset the world" events from explicit MODIFY_EXPRs.  */
#define opf_non_specific  (1 << 3)
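
/* As an illustration of how these flags combine, parse_ssa_operands below
   processes the LHS of an assignment that overwrites a variable in its
   entirety with

     get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
                        opf_is_def | opf_kill_def);

   while a partial store (e.g. through an ARRAY_REF) passes only opf_is_def,
   and traversal inside an ADDR_EXPR adds opf_no_vops.  */
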
/* This structure maintains a sorted list of operands which is created by
   parse_ssa_operands.  */
struct opbuild_list_d GTY (())
{
  varray_type vars;     /* The VAR_DECLS tree.  */
  varray_type uid;      /* The sort value for virtual symbols.  */
  varray_type next;     /* The next index in the sorted list.  */
  int first;            /* First element in list.  */
  unsigned num;         /* Number of elements.  */
};

#define OPBUILD_LAST     -1
/* Array for building all the def operands.  */
static GTY (()) struct opbuild_list_d build_defs;

/* Array for building all the use operands.  */
static GTY (()) struct opbuild_list_d build_uses;

/* Array for building all the v_may_def operands.  */
static GTY (()) struct opbuild_list_d build_v_may_defs;

/* Array for building all the vuse operands.  */
static GTY (()) struct opbuild_list_d build_vuses;

/* Array for building all the v_must_def operands.  */
static GTY (()) struct opbuild_list_d build_v_must_defs;

/* True if the operands for call clobbered vars are cached and valid.  */
bool ssa_call_clobbered_cache_valid;
bool ssa_ro_call_cache_valid;

/* These arrays are the cached operand vectors for call clobbered calls.  */
static VEC(tree,heap) *clobbered_v_may_defs;
static VEC(tree,heap) *clobbered_vuses;
static VEC(tree,heap) *ro_call_vuses;
static bool clobbered_aliased_loads;
static bool clobbered_aliased_stores;
static bool ro_call_aliased_loads;
static bool ops_active = false;

static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
static unsigned operand_memory_index;
static void note_addressable (tree, stmt_ann_t);
static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
static void get_tmr_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
static void append_v_may_def (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree);
static void add_call_read_ops (tree);
static void add_stmt_operand (tree *, stmt_ann_t, int);
static void build_ssa_operands (tree stmt);

static def_optype_p free_defs = NULL;
static use_optype_p free_uses = NULL;
static vuse_optype_p free_vuses = NULL;
static maydef_optype_p free_maydefs = NULL;
static mustdef_optype_p free_mustdefs = NULL;
/* Initialize a virtual operand build LIST called NAME with NUM elements.  */

static inline void
opbuild_initialize_virtual (struct opbuild_list_d *list, int num,
			    const char *name)
{
  list->first = OPBUILD_LAST;
  list->num = 0;
  VARRAY_TREE_INIT (list->vars, num, name);
  VARRAY_UINT_INIT (list->uid, num, "List UID");
  VARRAY_INT_INIT (list->next, num, "List NEXT");
}


/* Initialize a real operand build LIST called NAME with NUM elements.  */

static inline void
opbuild_initialize_real (struct opbuild_list_d *list, int num, const char *name)
{
  list->first = OPBUILD_LAST;
  list->num = 0;
  VARRAY_TREE_PTR_INIT (list->vars, num, name);
  VARRAY_INT_INIT (list->next, num, "List NEXT");
  /* The UID field is not needed since we sort based on the pointer value.  */
  list->uid = NULL;
}
/* Free memory used in virtual operand build object LIST.  */

static inline void
opbuild_free (struct opbuild_list_d *list)
{
  list->vars = NULL;
  list->uid = NULL;
  list->next = NULL;
}


/* Number of elements in an opbuild list.  */

static inline unsigned
opbuild_num_elems (struct opbuild_list_d *list)
{
  return list->num;
}
/* Add VAR to the real operand list LIST, keeping it sorted and avoiding
   duplicates.  The actual sort value is the tree pointer value.  */

static inline void
opbuild_append_real (struct opbuild_list_d *list, tree *var)
{
  int index;

#ifdef ENABLE_CHECKING
  /* Ensure the real operand doesn't exist already.  */
  for (index = list->first;
       index != OPBUILD_LAST;
       index = VARRAY_INT (list->next, index))
    gcc_assert (VARRAY_TREE_PTR (list->vars, index) != var);
#endif

  /* First item in the list.  */
  index = VARRAY_ACTIVE_SIZE (list->vars);
  if (index == 0)
    list->first = index;
  else
    VARRAY_INT (list->next, index - 1) = index;
  VARRAY_PUSH_INT (list->next, OPBUILD_LAST);
  VARRAY_PUSH_TREE_PTR (list->vars, var);
  list->num++;
}
/* Add VAR to the virtual operand list LIST, keeping it sorted and avoiding
   duplicates.  The actual sort value is the DECL UID of the base variable.  */

static inline void
opbuild_append_virtual (struct opbuild_list_d *list, tree var)
{
  int index, curr, last;
  unsigned int var_uid;

  if (TREE_CODE (var) != SSA_NAME)
    var_uid = DECL_UID (var);
  else
    var_uid = DECL_UID (SSA_NAME_VAR (var));

  index = VARRAY_ACTIVE_SIZE (list->vars);

  if (index == 0)
    {
      VARRAY_PUSH_TREE (list->vars, var);
      VARRAY_PUSH_UINT (list->uid, var_uid);
      VARRAY_PUSH_INT (list->next, OPBUILD_LAST);
      list->first = 0;
      list->num = 1;
      return;
    }

  last = OPBUILD_LAST;
  /* Find the correct spot in the sorted list.  */
  for (curr = list->first;
       curr != OPBUILD_LAST;
       last = curr, curr = VARRAY_INT (list->next, curr))
    if (VARRAY_UINT (list->uid, curr) > var_uid)
      break;

  if (last == OPBUILD_LAST)
    {
      /* First item in the list.  */
      VARRAY_PUSH_INT (list->next, list->first);
      list->first = index;
    }
  else
    {
      /* Don't enter duplicates at all.  */
      if (VARRAY_UINT (list->uid, last) == var_uid)
	return;

      VARRAY_PUSH_INT (list->next, VARRAY_INT (list->next, last));
      VARRAY_INT (list->next, last) = index;
    }
  VARRAY_PUSH_TREE (list->vars, var);
  VARRAY_PUSH_UINT (list->uid, var_uid);
  list->num++;
}
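
/* For example, appending variables whose DECL_UIDs are 7, 3 and 5 (in that
   order) leaves the varrays as

     vars:  [ v7, v3, v5 ]
     uid:   [  7,  3,  5 ]
     next:  [ -1,  2,  0 ]      first = 1

   so walking 'first'/'next' visits v3, v5, v7 in ascending UID order while
   the underlying varrays stay in insertion order.  (Illustrative values;
   'v7' etc. stand for arbitrary _DECL nodes.)  */
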
/* Return the first element index in LIST.  OPBUILD_LAST means there are no
   more elements.  */

static inline int
opbuild_first (struct opbuild_list_d *list)
{
  if (list->num > 0)
    return list->first;
  else
    return OPBUILD_LAST;
}


/* Return the next element after PREV in LIST.  */

static inline int
opbuild_next (struct opbuild_list_d *list, int prev)
{
  return VARRAY_INT (list->next, prev);
}


/* Return the real element at index ELEM in LIST.  */

static inline tree *
opbuild_elem_real (struct opbuild_list_d *list, int elem)
{
  return VARRAY_TREE_PTR (list->vars, elem);
}


/* Return the virtual element at index ELEM in LIST.  */

static inline tree
opbuild_elem_virtual (struct opbuild_list_d *list, int elem)
{
  return VARRAY_TREE (list->vars, elem);
}


/* Return the virtual element uid at index ELEM in LIST.  */

static inline unsigned int
opbuild_elem_uid (struct opbuild_list_d *list, int elem)
{
  return VARRAY_UINT (list->uid, elem);
}
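
/* The usual traversal idiom over a build list, used throughout this file,
   is therefore:

     int i;
     for (i = opbuild_first (&build_vuses);
          i != OPBUILD_LAST;
          i = opbuild_next (&build_vuses, i))
       {
         tree t = opbuild_elem_virtual (&build_vuses, i);
         ...
       }
 */
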
/* Reset an operand build list.  */

static inline void
opbuild_clear (struct opbuild_list_d *list)
{
  list->first = OPBUILD_LAST;
  VARRAY_POP_ALL (list->vars);
  VARRAY_POP_ALL (list->next);
  if (list->uid)
    VARRAY_POP_ALL (list->uid);
  list->num = 0;
}


/* Remove ELEM from LIST where PREV is the previous element.  Return the next
   element.  */

static inline int
opbuild_remove_elem (struct opbuild_list_d *list, int elem, int prev)
{
  int ret;
  if (prev != OPBUILD_LAST)
    {
      gcc_assert (VARRAY_INT (list->next, prev) == elem);
      ret = VARRAY_INT (list->next, prev) = VARRAY_INT (list->next, elem);
    }
  else
    {
      gcc_assert (list->first == elem);
      ret = list->first = VARRAY_INT (list->next, elem);
    }
  list->num--;
  return ret;
}
/* Return true if the ssa operands cache is active.  */

bool
ssa_operands_active (void)
{
  return ops_active;
}


/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  opbuild_initialize_real (&build_defs, 5, "build defs");
  opbuild_initialize_real (&build_uses, 10, "build uses");
  opbuild_initialize_virtual (&build_vuses, 25, "build_vuses");
  opbuild_initialize_virtual (&build_v_may_defs, 25, "build_v_may_defs");
  opbuild_initialize_virtual (&build_v_must_defs, 25, "build_v_must_defs");
  gcc_assert (operand_memory == NULL);
  operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
  ops_active = true;
}
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  opbuild_free (&build_defs);
  opbuild_free (&build_uses);
  opbuild_free (&build_v_must_defs);
  opbuild_free (&build_v_may_defs);
  opbuild_free (&build_vuses);
  free_defs = NULL;
  free_uses = NULL;
  free_vuses = NULL;
  free_maydefs = NULL;
  free_mustdefs = NULL;
  while ((ptr = operand_memory) != NULL)
    {
      operand_memory = operand_memory->next;
      ggc_free (ptr);
    }

  VEC_free (tree, heap, clobbered_v_may_defs);
  VEC_free (tree, heap, clobbered_vuses);
  VEC_free (tree, heap, ro_call_vuses);
  ops_active = false;
}
/* Return SIZE bytes of memory from the operand arena, starting a new
   chunk when the current one is exhausted.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;
  if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
    {
      struct ssa_operand_memory_d *ptr;
      ptr = ggc_alloc (sizeof (struct ssa_operand_memory_d));
      ptr->next = operand_memory;
      operand_memory = ptr;
      operand_memory_index = 0;
    }
  ptr = &(operand_memory->mem[operand_memory_index]);
  operand_memory_index += size;
  return ptr;
}
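
/* Callers simply bump-allocate from the arena; the alloc_* routines
   generated by tree-ssa-opfinalize.h (via FINALIZE_ALLOC below) obtain
   their operand structures with something like

     def_optype_p ptr = ssa_operand_alloc (sizeof (struct def_optype_d));

   The memory is never freed individually; the chunks are released en
   masse by fini_ssa_operands.  */
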
/* Make sure PTR is in the correct immediate use list.  Since uses are simply
   pointers into the stmt TREE, there is no way of telling if anyone has
   changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
   The contents are different, but the pointer is still the same.  This
   routine will check to make sure PTR is in the correct list, and if it isn't
   put it in the correct list.  We cannot simply check the previous node
   because all nodes in the same stmt might have been changed.  */

static inline void
correct_use_link (use_operand_p ptr, tree stmt)
{
  use_operand_p prev;
  tree root;

  /* fold_stmt () may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  prev = ptr->prev;
  if (prev)
    {
      bool stmt_mod = true;
      /* Find the first element which isn't a SAFE iterator, is in a
	 different stmt, and is not a modified stmt.  That node is in the
	 correct list, see if we are too.  */
      while (stmt_mod)
	{
	  while (prev->stmt == stmt || prev->stmt == NULL)
	    prev = prev->prev;
	  if (prev->use == NULL)
	    stmt_mod = false;
	  else
	    if ((stmt_mod = stmt_modified_p (prev->stmt)))
	      prev = prev->prev;
	}

      /* Get the ssa_name of the list the node is in.  */
      if (prev->use == NULL)
	root = prev->stmt;
      else
	root = *(prev->use);
      /* If it's the right list, simply return.  */
      if (root == *(ptr->use))
	return;
    }
  /* It is in the wrong list if we reach here.  */
  delink_imm_use (ptr);
  link_imm_use (ptr, *(ptr->use));
}
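
/* This situation arises when a transformation rewrites an operand slot in
   place.  For instance, if a slot that used to hold a_1 is overwritten with

     TREE_OPERAND (rhs, 0) = b_3;

   the use_operand_p for that slot is still chained on a_1's immediate-use
   list even though it now refers to b_3; correct_use_link moves it onto
   b_3's list.  (a_1 and b_3 are hypothetical SSA names, for illustration
   only.)  */
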
#define FINALIZE_OPBUILD		build_defs
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_real (&build_defs, (I))
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_real (&build_defs, (I))
#define FINALIZE_FUNC			finalize_ssa_def_ops
#define FINALIZE_ALLOC			alloc_def
#define FINALIZE_FREE			free_defs
#define FINALIZE_TYPE			struct def_optype_d
#define FINALIZE_ELEM(PTR)		((PTR)->def_ptr)
#define FINALIZE_OPS			DEF_OPS
#define FINALIZE_BASE(VAR)		VAR
#define FINALIZE_BASE_TYPE		tree *
#define FINALIZE_BASE_ZERO		NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)	FINALIZE_ELEM (PTR) = (VAL)
#include "tree-ssa-opfinalize.h"
/* This routine will create stmt operands for STMT from the def build list.  */

static void
finalize_ssa_defs (tree stmt)
{
  unsigned int num = opbuild_num_elems (&build_defs);
  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);

  /* If there is an old list, often the new list is identical, or close, so
     find the elements at the beginning that are the same as the vector.  */

  finalize_ssa_def_ops (stmt);
  opbuild_clear (&build_defs);
}
#define FINALIZE_OPBUILD		build_uses
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_real (&build_uses, (I))
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_real (&build_uses, (I))
#define FINALIZE_FUNC			finalize_ssa_use_ops
#define FINALIZE_ALLOC			alloc_use
#define FINALIZE_FREE			free_uses
#define FINALIZE_TYPE			struct use_optype_d
#define FINALIZE_ELEM(PTR)		((PTR)->use_ptr.use)
#define FINALIZE_OPS			USE_OPS
#define FINALIZE_USE_PTR(PTR)		USE_OP_PTR (PTR)
#define FINALIZE_BASE(VAR)		VAR
#define FINALIZE_BASE_TYPE		tree *
#define FINALIZE_BASE_ZERO		NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)			\
				(PTR)->use_ptr.use = (VAL);	\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   *(VAL), (STMT))
#include "tree-ssa-opfinalize.h"
/* Create a new use operand vector for STMT from the use build list.  */

static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = opbuild_num_elems (&build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to get_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*(opbuild_elem_real (&build_uses, x)) != stmt);
  }
#endif
  finalize_ssa_use_ops (stmt);
  opbuild_clear (&build_uses);
}
/* Create a new v_may_def operand vector for STMT from the build list.  */
#define FINALIZE_OPBUILD		build_v_may_defs
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_virtual (&build_v_may_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_uid (&build_v_may_defs, (I))
#define FINALIZE_FUNC			finalize_ssa_v_may_def_ops
#define FINALIZE_ALLOC			alloc_maydef
#define FINALIZE_FREE			free_maydefs
#define FINALIZE_TYPE			struct maydef_optype_d
#define FINALIZE_ELEM(PTR)		MAYDEF_RESULT (PTR)
#define FINALIZE_OPS			MAYDEF_OPS
#define FINALIZE_USE_PTR(PTR)		MAYDEF_OP_PTR (PTR)
#define FINALIZE_BASE_ZERO		0
#define FINALIZE_BASE(VAR)		((TREE_CODE (VAR) == SSA_NAME)	\
					 ? DECL_UID (SSA_NAME_VAR (VAR)) \
					 : DECL_UID ((VAR)))
#define FINALIZE_BASE_TYPE		unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)			\
				(PTR)->def_var = (VAL);		\
				(PTR)->use_var = (VAL);		\
				(PTR)->use_ptr.use = &((PTR)->use_var);	\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"
static void
finalize_ssa_v_may_defs (tree stmt)
{
  finalize_ssa_v_may_def_ops (stmt);
}


/* Clear the in_list bits and empty the build array for v_may_defs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = opbuild_num_elems (&build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = opbuild_elem_virtual (&build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }
  opbuild_clear (&build_v_may_defs);
}
#define FINALIZE_OPBUILD		build_vuses
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_virtual (&build_vuses, (I))
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_uid (&build_vuses, (I))
#define FINALIZE_FUNC			finalize_ssa_vuse_ops
#define FINALIZE_ALLOC			alloc_vuse
#define FINALIZE_FREE			free_vuses
#define FINALIZE_TYPE			struct vuse_optype_d
#define FINALIZE_ELEM(PTR)		VUSE_OP (PTR)
#define FINALIZE_OPS			VUSE_OPS
#define FINALIZE_USE_PTR(PTR)		VUSE_OP_PTR (PTR)
#define FINALIZE_BASE_ZERO		0
#define FINALIZE_BASE(VAR)		((TREE_CODE (VAR) == SSA_NAME)	\
					 ? DECL_UID (SSA_NAME_VAR (VAR)) \
					 : DECL_UID ((VAR)))
#define FINALIZE_BASE_TYPE		unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)			\
				(PTR)->use_var = (VAL);		\
				(PTR)->use_ptr.use = &((PTR)->use_var);	\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"
/* Create a new vuse operand vector for STMT from the build list.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_v_may_defs;
  int vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  num = opbuild_num_elems (&build_vuses);
  num_v_may_defs = opbuild_num_elems (&build_v_may_defs);

  if (num > 0 && num_v_may_defs > 0)
    {
      int last = OPBUILD_LAST;
      vuse_index = opbuild_first (&build_vuses);
      for ( ; vuse_index != OPBUILD_LAST; )
	{
	  tree vuse;
	  vuse = opbuild_elem_virtual (&build_vuses, vuse_index);
	  if (TREE_CODE (vuse) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (vuse);
	      ann->in_vuse_list = 0;
	      if (ann->in_v_may_def_list)
		{
		  vuse_index = opbuild_remove_elem (&build_vuses, vuse_index,
						    last);
		  continue;
		}
	    }
	  last = vuse_index;
	  vuse_index = opbuild_next (&build_vuses, vuse_index);
	}
    }
  else
    /* Clear out the in_list bits.  */
    for (vuse_index = opbuild_first (&build_vuses);
	 vuse_index != OPBUILD_LAST;
	 vuse_index = opbuild_next (&build_vuses, vuse_index))
      {
	tree t = opbuild_elem_virtual (&build_vuses, vuse_index);
	if (TREE_CODE (t) != SSA_NAME)
	  {
	    var_ann_t ann = var_ann (t);
	    ann->in_vuse_list = 0;
	  }
      }

  finalize_ssa_vuse_ops (stmt);
  /* The v_may_def build vector wasn't cleaned up because we needed it.  */
  cleanup_v_may_defs ();

  /* Free the vuses build vector.  */
  opbuild_clear (&build_vuses);
}
/* Create a new v_must_def operand vector for STMT from the build list.  */

#define FINALIZE_OPBUILD		build_v_must_defs
#define FINALIZE_OPBUILD_ELEM(I)	opbuild_elem_virtual (&build_v_must_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)	opbuild_elem_uid (&build_v_must_defs, (I))
#define FINALIZE_FUNC			finalize_ssa_v_must_def_ops
#define FINALIZE_ALLOC			alloc_mustdef
#define FINALIZE_FREE			free_mustdefs
#define FINALIZE_TYPE			struct mustdef_optype_d
#define FINALIZE_ELEM(PTR)		MUSTDEF_RESULT (PTR)
#define FINALIZE_OPS			MUSTDEF_OPS
#define FINALIZE_USE_PTR(PTR)		MUSTDEF_KILL_PTR (PTR)
#define FINALIZE_BASE_ZERO		0
#define FINALIZE_BASE(VAR)		((TREE_CODE (VAR) == SSA_NAME)	\
					 ? DECL_UID (SSA_NAME_VAR (VAR)) \
					 : DECL_UID ((VAR)))
#define FINALIZE_BASE_TYPE		unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)			\
				(PTR)->def_var = (VAL);		\
				(PTR)->kill_var = (VAL);	\
				(PTR)->use_ptr.use = &((PTR)->kill_var);\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"
static void
finalize_ssa_v_must_defs (tree stmt)
{
  /* In the presence of subvars, there may be more than one V_MUST_DEF per
     statement (one for each subvar).  It is a bit expensive to verify that
     all must-defs in a statement belong to subvars if there is more than one
     V_MUST_DEF, so we don't do it.  Suffice to say, if you reach here without
     having subvars, and have more than one V_MUST_DEF, you have hit a bug.  */

  finalize_ssa_v_must_def_ops (stmt);
  opbuild_clear (&build_v_must_defs);
}
/* Finalize all the build vectors and fill them into STMT's operand cache.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);
}


/* Start the process of building up operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (opbuild_num_elems (&build_defs) == 0);
  gcc_assert (opbuild_num_elems (&build_uses) == 0);
  gcc_assert (opbuild_num_elems (&build_vuses) == 0);
  gcc_assert (opbuild_num_elems (&build_v_may_defs) == 0);
  gcc_assert (opbuild_num_elems (&build_v_must_defs) == 0);
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  opbuild_append_real (&build_defs, def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  opbuild_append_real (&build_uses, use_p);
}


/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      /* Don't allow duplicate entries.  */
      if (ann->in_v_may_def_list)
	return;
      ann->in_v_may_def_list = 1;
    }

  opbuild_append_virtual (&build_v_may_defs, var);
}


/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
	return;
      ann->in_vuse_list = 1;
    }

  opbuild_append_virtual (&build_vuses, var);
}


/* Add VAR to the list of virtual must definitions.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < opbuild_num_elems (&build_v_must_defs); i++)
    if (var == opbuild_elem_virtual (&build_v_must_defs, i))
      return;

  opbuild_append_virtual (&build_v_must_defs, var);
}
/* Parse STMT looking for operands.  When finished, the various build_*
   operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      /* First get operands from the RHS.  For the LHS, we use a V_MAY_DEF if
	 either only part of LHS is modified or if the RHS might throw,
	 otherwise, use V_MUST_DEF.

	 ??? If it might throw, we should represent somehow that it is killed
	 on the fallthrough path.  */
      {
	tree lhs = TREE_OPERAND (stmt, 0);
	int lhs_flags = opf_is_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);

	/* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
	   or not the entire LHS is modified; that depends on what's
	   inside the VIEW_CONVERT_EXPR.  */
	if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
	  lhs = TREE_OPERAND (lhs, 0);

	if (TREE_CODE (lhs) != ARRAY_REF && TREE_CODE (lhs) != ARRAY_RANGE_REF
	    && TREE_CODE (lhs) != BIT_FIELD_REF
	    && TREE_CODE (lhs) != REALPART_EXPR
	    && TREE_CODE (lhs) != IMAGPART_EXPR)
	  lhs_flags |= opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
      }
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

      /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
	 pointer (which may only happen for USE operands), we will fail in
	 append_use.  This default will handle statements like empty
	 statements, or CALL_EXPRs that may appear on the RHS of a statement
	 or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }
}
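
/* To make the above concrete: assuming 'a', 'b', 'c' and 'i' are ordinary
   local variables, the assignment

     a = b + c;

   yields a DEF of 'a' and USEs of 'b' and 'c', while

     a[i] = b;

   has an ARRAY_REF on the LHS, so 'a' (not being a GIMPLE register) gets
   only a V_MAY_DEF, plus real USEs of 'b' and 'i'.  */
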
/* Create an operands cache for STMT.  The statement's annotation flags are
   reset, the statement is parsed into the build_* vectors, and those
   vectors are then finalized into the annotation's operand cache.  Where
   the new operands match the old ones, the old vectors (and, for virtual
   operands, their SSA_NAME versions) are reused.  */

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands, nor
     makes aliased loads or stores.  */
  if (ann)
    {
      ann->has_volatile_ops = false;
      ann->makes_aliased_stores = false;
      ann->makes_aliased_loads = false;
    }

  start_ssa_stmt_operands ();

  parse_ssa_operands (stmt);

  finalize_ssa_stmt_operands (stmt);
}
/* Free any operand vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->maydef_ops = NULL;
  ops->mustdef_ops = NULL;
  ops->vuse_ops = NULL;
}


/* Get the operands of statement STMT.  Note that repeated calls to
   this function for the same statement will do nothing until the
   statement is marked modified by a call to mark_stmt_modified ().  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);
  /* If update_stmt_operands is called before SSA is initialized, don't
     do anything.  */
  if (!ssa_operands_active ())
    return;
  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  gcc_assert (ann->modified);

  timevar_push (TV_TREE_OPS);

  build_ssa_operands (stmt);

  /* Clear the modified bit for STMT.  Subsequent calls to
     update_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to mark_stmt_modified ().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
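
/* The expected protocol for a pass, then, is to modify the tree, mark the
   statement, and let the cache refresh itself, roughly:

     TREE_OPERAND (stmt, 1) = new_rhs;
     update_stmt (stmt);

   where update_stmt (see tree-flow-inline.h) marks the statement modified
   and triggers update_stmt_operands when the cache is active, and new_rhs
   is a placeholder for whatever expression the pass built.  */
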
/* Copy virtual operands from SRC to DEST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  tree t;
  ssa_op_iter iter, old_iter;
  use_operand_p use_p, u2;
  def_operand_p def_p, d2;

  build_ssa_operands (dest);

  /* Copy all the virtual fields.  */
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
    append_vuse (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
    append_v_may_def (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
    append_v_must_def (t);

  if (opbuild_num_elems (&build_vuses) == 0
      && opbuild_num_elems (&build_v_may_defs) == 0
      && opbuild_num_elems (&build_v_must_defs) == 0)
    return;

  /* Now commit the virtual operands to this stmt.  */
  finalize_ssa_v_must_defs (dest);
  finalize_ssa_v_may_defs (dest);
  finalize_ssa_vuses (dest);

  /* Finally, set the fields to the same values as the originals.  */
  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, t);
      t = op_iter_next_tree (&old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_maydef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));
}
/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this
   can be used to eliminate redundant loads.  OLD_STMT is the store stmt,
   and NEW_STMT is the new load which represents a load of the values
   stored.  */

void
create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
{
  stmt_ann_t ann;
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned x;

  ann = get_stmt_ann (new_stmt);

  /* Process the stmt looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (x = 0; x < opbuild_num_elems (&build_vuses); x++)
    {
      tree t = opbuild_elem_virtual (&build_vuses, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_vuse_list = 0;
	}
    }

  for (x = 0; x < opbuild_num_elems (&build_v_may_defs); x++)
    {
      tree t = opbuild_elem_virtual (&build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }

  /* Remove any virtual operands that were found.  */
  opbuild_clear (&build_v_may_defs);
  opbuild_clear (&build_v_must_defs);
  opbuild_clear (&build_vuses);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
			     (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
    append_vuse (op);

  /* Now build the operands for this new stmt.  */
  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}
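
/* For instance, given the store

     # a_5 = V_MAY_DEF <a_4>
     *p_1 = x_3;

   DOM can build an artificial load of *p_1 and have this routine give it a
   VUSE of a_5, so the expression can be looked up later as if the stored
   value had been loaded back.  (The SSA names here are purely
   illustrative.)  */
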
/* Swap the operand slots EXP0 and EXP1 in statement STMT, keeping the
   operand cache's immediate use links consistent.  */

static void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;
      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }
      /* If both uses don't have operand entries, there isn't much we can do
	 at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
	{
	  tree *tmp = USE_OP_PTR (use1)->use;
	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
	  USE_OP_PTR (use0)->use = tmp;
	}
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
/* Recursively scan the expression pointed to by EXPR_P in statement STMT.
   FLAGS is one of the OPF_* constants modifying how to interpret the
   operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* We could have the address of a component, array member,
	 etc which has interesting variable references.  */
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the stmt takes its address will
	 be of interest to some passes (e.g. alias resolution).  */
      add_stmt_operand (expr_p, s_ann, 0);

      /* If the address is invariant, there may be no interesting variable
	 references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* There should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find here
	 are ARRAY_REF indices which will always be real operands (GIMPLE
	 does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case CONST_DECL:
      {
	subvar_t svars;

	/* Add the subvars for a variable if it has subvars, to DEFS or USES.
	   Otherwise, add the variable itself.
	   Whether it goes to USES or DEFS depends on the operand flags.  */
	if (var_can_have_subvars (expr)
	    && (svars = get_subvars_for_var (expr)))
	  {
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      add_stmt_operand (&sv->var, s_ann, flags);
	  }
	else
	  add_stmt_operand (expr_p, s_ann, flags);
	return;
      }
    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
	 representing the array.  The virtual variable for an ARRAY_REF
	 is the VAR_DECL for the array.  */

      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
	 according to the value of IS_DEF.  Recurse if the LHS of the
	 ARRAY_REF node is not a regular variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, s_ann, flags);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
      return;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree ref;
	HOST_WIDE_INT offset, size;
	/* This component ref becomes an access to all of the subvariables
	   it can touch, if we can determine that, but *NOT* the real one.
	   If we can't determine which fields we could touch, the recursion
	   will eventually get to a variable and add *all* of its subvars, or
	   whatever is the minimum correct subset.  */

	ref = okay_component_ref_for_subvars (expr, &offset, &size);
	if (ref)
	  {
	    subvar_t svars = get_subvars_for_var (ref);
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      {
		bool exact;
		if (overlap_subvar (offset, size, sv, &exact))
		  {
		    if (!exact)
		      flags &= ~opf_kill_def;
		    add_stmt_operand (&sv->var, s_ann, flags);
		  }
	      }
	  }
	else
	  get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			     flags & ~opf_kill_def);

	if (code == COMPONENT_REF)
	  {
	    if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      s_ann->has_volatile_ops = true;
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	  }
	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;
    case MODIFY_EXPR:
      {
	int subflags;
	tree op;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

	op = TREE_OPERAND (expr, 0);
	if (TREE_CODE (op) == WITH_SIZE_EXPR)
	  op = TREE_OPERAND (op, 0);
	if (TREE_CODE (op) == ARRAY_REF
	    || TREE_CODE (op) == ARRAY_RANGE_REF
	    || TREE_CODE (op) == REALPART_EXPR
	    || TREE_CODE (op) == IMAGPART_EXPR)
	  subflags = opf_is_def;
	else
	  subflags = opf_is_def | opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
	return;
      }
    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	tree t;
	for (t = TREE_OPERAND (expr, 0); t; t = TREE_CHAIN (t))
	  get_expr_operands (stmt, &TREE_VALUE (t), opf_none);

	return;
      }

    case TRUTH_NOT_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	tree op0 = TREE_OPERAND (expr, 0);
	tree op1 = TREE_OPERAND (expr, 1);

	/* If it would be profitable to swap the operands, then do so to
	   canonicalize the statement, enabling better optimization.

	   By placing canonicalization of such expressions here we
	   transparently keep statements in canonical form, even
	   when the statement is modified.  */
	if (tree_swap_operands_p (op0, op1, false))
	  {
	    /* For relationals we need to swap the operands
	       and change the code.  */
	    if (code == LT_EXPR
		|| code == GT_EXPR
		|| code == LE_EXPR
		|| code == GE_EXPR)
	      {
		TREE_SET_CODE (expr, swap_tree_comparison (code));
		swap_tree_operands (stmt,
				    &TREE_OPERAND (expr, 0),
				    &TREE_OPERAND (expr, 1));
	      }
	    /* For a commutative operator we can just swap the operands.  */
	    else if (commutative_tree_code (code))
	      {
		swap_tree_operands (stmt,
				    &TREE_OPERAND (expr, 0),
				    &TREE_OPERAND (expr, 1));
	      }
	  }

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
	goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
	goto do_binary;
      if (class == tcc_constant || class == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  internal_error ("internal error");
#endif
  gcc_unreachable ();
}
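
/* As an example of the canonicalization performed in the do_binary case
   above: while scanning the operands of

     if (3 < a_2) ...

   tree_swap_operands_p suggests swapping, so the comparison is rewritten
   to 'a_2 > 3' on the fly, and swap_tree_operands keeps a_2's immediate-use
   entry pointing at the correct operand slot.  (a_2 is an arbitrary SSA
   name, for illustration only.)  */
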
/* Scan the operands in the ASM_EXPR statement STMT.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
			       &allows_mem, &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }


  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, s_ann, opf_is_def);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	    }

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
	  {
	    tree var = referenced_var (i);

	    /* Subvars are explicitly represented in this list, so
	       we don't need the original to be added to the clobber
	       ops, but the original *will* be in this list because
	       we keep the addressability of the original
	       variable up-to-date so we don't screw up the rest of
	       the backend.  */
	    if (var_can_have_subvars (var)
		&& get_subvars_for_var (var) != NULL)
	      continue;

	    add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	  }

	break;
      }
}
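
/* Thus, for

     asm volatile ("" : : : "memory");

   every call-clobbered and addressable variable in the function receives a
   non-specific V_MAY_DEF, which is what makes such an asm act as a memory
   barrier at the tree SSA level.  */
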
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its type memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);
	  if (v_ann->type_mem_tag)
	    add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
	}
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }

  /* Everything else *should* have been folded elsewhere, but users
     are smarter than we in finding ways to write invalid code.  We
     cannot just assert here.  If we were absolutely certain that we
     do handle all valid cases, then we could just do nothing here.
     That seems optimistic, so attempt to do something logical... */
  else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
	   && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
    {
      /* Make sure we know the object is addressable.  */
      pptr = &TREE_OPERAND (ptr, 0);
      add_stmt_operand (pptr, s_ann, 0);

      /* Mark the object itself with a VUSE.  */
      pptr = &TREE_OPERAND (*pptr, 0);
      get_expr_operands (stmt, pptr, flags);
      return;
    }

  /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
  else
    gcc_unreachable ();

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none);
}
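
/* So a dereference such as

     *p_1 = x_2;

   produces a real USE of p_1 plus a V_MAY_DEF of p_1's name memory tag (or
   of its base variable's type memory tag when no flow-sensitive alias
   information is available); the tag stands in for every variable p_1 may
   point to.  (p_1 and x_2 are illustrative SSA names.)  */
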
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag = TMR_TAG (expr);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);

  /* MEM_REFs should never be killing.  */
  flags &= ~opf_kill_def;

  if (TMR_SYMBOL (expr))
    note_addressable (TMR_SYMBOL (expr), stmt_ann (stmt));

  if (tag)
    add_stmt_operand (&tag, stmt_ann (stmt), flags);
  else
    /* Something weird, so ensure that we will be careful.  */
    stmt_ann (stmt)->has_volatile_ops = true;
}
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or V_USE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
  if (aliases_computed_p
      && !bitmap_empty_p (call_clobbered_vars)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt);
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt);
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}
1743 /* Add *VAR_P to the appropriate operand array for INFO. FLAGS is as in
1744 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1745 the statement's real operands, otherwise it is added to virtual
1746 operands. */
1748 static void
1749 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1751 bool is_real_op;
1752 tree var, sym;
1753 var_ann_t v_ann;
1755 var = *var_p;
1756 STRIP_NOPS (var);
1758 /* If the operand is an ADDR_EXPR, add its operand to the list of
1759 variables that have had their address taken in this statement. */
1760 if (TREE_CODE (var) == ADDR_EXPR)
1762 note_addressable (TREE_OPERAND (var, 0), s_ann);
1763 return;
1766 /* If the original variable is not a scalar, it will be added to the list
1767 of virtual operands. In that case, use its base symbol as the virtual
1768 variable representing it. */
1769 is_real_op = is_gimple_reg (var);
1770 if (!is_real_op && !DECL_P (var))
1771 var = get_virtual_var (var);
1773 /* If VAR is not a variable that we care to optimize, do nothing. */
1774 if (var == NULL_TREE || !SSA_VAR_P (var))
1775 return;
1777 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1778 v_ann = var_ann (sym);
1780 /* Mark statements with volatile operands. Optimizers should back
1781 off from statements having volatile operands. */
1782 if (TREE_THIS_VOLATILE (sym) && s_ann)
1783 s_ann->has_volatile_ops = true;
1785 /* If the variable cannot be modified and this is a V_MAY_DEF change
1786 it into a VUSE. This happens when read-only variables are marked
1787 call-clobbered and/or aliased to writable variables. So we only
1788 check that this only happens on non-specific stores.
1790 Note that if this is a specific store, i.e. associated with a
1791 modify_expr, then we can't suppress the V_DEF, lest we run into
1792 validation problems.
1794 This can happen when programs cast away const, leaving us with a
1795 store to read-only memory. If the statement is actually executed
1796 at runtime, then the program is ill formed. If the statement is
1797 not executed then all is well. At the very least, we cannot ICE. */
1798 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1800 gcc_assert (!is_real_op);
1801 flags &= ~(opf_is_def | opf_kill_def);
1804 if (is_real_op)
1806 /* The variable is a GIMPLE register. Add it to real operands. */
1807 if (flags & opf_is_def)
1808 append_def (var_p);
1809 else
1810 append_use (var_p);
1812 else
1814 varray_type aliases;
1816 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1817 virtual operands, unless the caller has specifically requested
1818 not to add virtual operands (used when adding operands inside an
1819 ADDR_EXPR expression). */
1820 if (flags & opf_no_vops)
1821 return;
1823 aliases = v_ann->may_aliases;
1825 if (aliases == NULL)
1827 /* The variable is not aliased or it is an alias tag. */
1828 if (flags & opf_is_def)
1830 if (flags & opf_kill_def)
1832 /* Only regular variables or struct fields may get a
1833 V_MUST_DEF operand. */
1834 gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG
1835 || v_ann->mem_tag_kind == STRUCT_FIELD);
1836 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1837 variable definitions. */
1838 append_v_must_def (var);
1840 else
1842 /* Add a V_MAY_DEF for call-clobbered variables and
1843 memory tags. */
1844 append_v_may_def (var);
1847 else
1849 append_vuse (var);
1850 if (s_ann && v_ann->is_alias_tag)
1851 s_ann->makes_aliased_loads = 1;
1854 else
1856 size_t i;
1858 /* The variable is aliased. Add its aliases to the virtual
1859 operands. */
1860 gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);
1862 if (flags & opf_is_def)
1864 bool added_may_defs_p = false;
1866 /* If the variable is also an alias tag, add a virtual
1867 operand for it, otherwise we will miss representing
1868 references to the members of the variable's alias set.
1869 This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
1870 if (v_ann->is_alias_tag)
1872 added_may_defs_p = true;
1873 append_v_may_def (var);
1876 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1878 /* While VAR may be modifiable, some of its aliases
1879 may not be. If that's the case, we don't really
1880 need to add them a V_MAY_DEF for them. */
1881 tree alias = VARRAY_TREE (aliases, i);
1883 if (unmodifiable_var_p (alias))
1884 append_vuse (alias);
1885 else
1887 append_v_may_def (alias);
1888 added_may_defs_p = true;
1892 if (s_ann && added_may_defs_p)
1893 s_ann->makes_aliased_stores = 1;
1895 else
1897 /* Similarly, append a virtual uses for VAR itself, when
1898 it is an alias tag. */
1899 if (v_ann->is_alias_tag)
1900 append_vuse (var);
1902 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1903 append_vuse (VARRAY_TREE (aliases, i));
1905 if (s_ann)
1906 s_ann->makes_aliased_loads = 1;
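/* Illustrative summary of the branches above, assuming 'a' is not a GIMPLE
   register: for a store 'a = b' where 'a' is aliased by 'x' and 'y', the
   cache receives V_MAY_DEFs for 'x' and 'y' (and for 'a' itself when it is
   an alias tag); if 'a' has no aliases, it receives a single V_MUST_DEF.  */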
1913 /* Record that VAR had its address taken in the statement with annotations
1914 S_ANN. */
1916 static void
1917 note_addressable (tree var, stmt_ann_t s_ann)
1919 subvar_t svars;
1921 if (!s_ann)
1922 return;
1924 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
1925 as the only thing we take the address of.
1926 See PR 21407 and the ensuing mailing list discussion. */
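/* For example (illustrative): given 'struct { int a; int b; } s;', the
   expression '&s.a' must make all of 's' addressable.  get_base_address
   strips the COMPONENT_REF and returns 's', so the loop below marks the
   subvariables of both 's.a' and 's.b'.  */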
1928 var = get_base_address (var);
1929 if (var && SSA_VAR_P (var))
1931 if (s_ann->addresses_taken == NULL)
1932 s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
1935 if (var_can_have_subvars (var)
1936 && (svars = get_subvars_for_var (var)))
1938 subvar_t sv;
1939 for (sv = svars; sv; sv = sv->next)
1940 bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid);
1942 else
1943 bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
1947 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1948 clobbered variables in the function. */
1950 static void
1951 add_call_clobber_ops (tree stmt)
1953 int i;
1954 unsigned u;
1955 tree t;
1956 bitmap_iterator bi;
1957 stmt_ann_t s_ann = stmt_ann (stmt);
1958 struct stmt_ann_d empty_ann;
1960 /* Functions that are not const, pure, or noreturn may clobber
1961 call-clobbered variables. */
1962 if (s_ann)
1963 s_ann->makes_clobbering_call = true;
1965 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1966 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1967 if (global_var)
1969 add_stmt_operand (&global_var, s_ann, opf_is_def);
1970 return;
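/* Illustrative note: .GLOBAL_VAR is an artificial variable standing in
   for all call-clobbered memory, so the single V_MAY_DEF added above
   replaces one V_MAY_DEF per call-clobbered variable.  */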
1973 /* If cache is valid, copy the elements into the build vectors. */
1974 if (ssa_call_clobbered_cache_valid)
1976 /* Process the caches in reverse order so we are always inserting at
1977 the head of the list. */
1978 for (i = VEC_length (tree, clobbered_vuses) - 1; i >= 0; i--)
1980 t = VEC_index (tree, clobbered_vuses, i);
1981 gcc_assert (TREE_CODE (t) != SSA_NAME);
1982 var_ann (t)->in_vuse_list = 1;
1983 opbuild_append_virtual (&build_vuses, t);
1985 for (i = VEC_length (tree, clobbered_v_may_defs) - 1; i >= 0; i--)
1987 t = VEC_index (tree, clobbered_v_may_defs, i);
1988 gcc_assert (TREE_CODE (t) != SSA_NAME);
1989 var_ann (t)->in_v_may_def_list = 1;
1990 opbuild_append_virtual (&build_v_may_defs, t);
1992 if (s_ann)
1994 s_ann->makes_aliased_loads = clobbered_aliased_loads;
1995 s_ann->makes_aliased_stores = clobbered_aliased_stores;
1997 return;
2000 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
2002 /* Add a V_MAY_DEF operand for every call clobbered variable. */
2003 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
2005 tree var = referenced_var (u);
2006 if (unmodifiable_var_p (var))
2007 add_stmt_operand (&var, &empty_ann, opf_none);
2008 else
2009 add_stmt_operand (&var, &empty_ann, opf_is_def | opf_non_specific);
2012 clobbered_aliased_loads = empty_ann.makes_aliased_loads;
2013 clobbered_aliased_stores = empty_ann.makes_aliased_stores;
2015 /* Set the flags for a stmt's annotation. */
2016 if (s_ann)
2018 s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
2019 s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
2022 /* Prepare empty cache vectors. */
2023 VEC_truncate (tree, clobbered_vuses, 0);
2024 VEC_truncate (tree, clobbered_v_may_defs, 0);
2026 /* Now fill the clobbered cache with the values that have been found. */
2027 for (i = opbuild_first (&build_vuses);
2028 i != OPBUILD_LAST;
2029 i = opbuild_next (&build_vuses, i))
2030 VEC_safe_push (tree, heap, clobbered_vuses,
2031 opbuild_elem_virtual (&build_vuses, i));
2033 gcc_assert (opbuild_num_elems (&build_vuses)
2034 == VEC_length (tree, clobbered_vuses));
2036 for (i = opbuild_first (&build_v_may_defs);
2037 i != OPBUILD_LAST;
2038 i = opbuild_next (&build_v_may_defs, i))
2039 VEC_safe_push (tree, heap, clobbered_v_may_defs,
2040 opbuild_elem_virtual (&build_v_may_defs, i));
2042 gcc_assert (opbuild_num_elems (&build_v_may_defs)
2043 == VEC_length (tree, clobbered_v_may_defs));
2045 ssa_call_clobbered_cache_valid = true;
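/* Illustrative example of the caching above: for two consecutive call
   statements

     foo ();
     bar ();

   the operands for 'foo' are computed from CALL_CLOBBERED_VARS and cached;
   'bar' (and every later call, until the cache is invalidated) then copies
   the cached vectors instead of rescanning the bitmap.  */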
2049 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
2050 function. */
2052 static void
2053 add_call_read_ops (tree stmt)
2055 int i;
2056 unsigned u;
2057 tree t;
2058 bitmap_iterator bi;
2059 stmt_ann_t s_ann = stmt_ann (stmt);
2060 struct stmt_ann_d empty_ann;
2062 /* If the function is not pure, it may reference memory.  Add
2063 a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
2064 for the heuristic used to decide whether to create .GLOBAL_VAR. */
2065 if (global_var)
2067 add_stmt_operand (&global_var, s_ann, opf_none);
2068 return;
2071 /* If cache is valid, copy the elements into the build vector. */
2072 if (ssa_ro_call_cache_valid)
2074 for (i = VEC_length (tree, ro_call_vuses) - 1; i >= 0; i--)
2076 /* Process the caches in reverse order so we are always inserting at
2077 the head of the list. */
2078 t = VEC_index (tree, ro_call_vuses, i);
2079 gcc_assert (TREE_CODE (t) != SSA_NAME);
2080 var_ann (t)->in_vuse_list = 1;
2081 opbuild_append_virtual (&build_vuses, t);
2083 if (s_ann)
2084 s_ann->makes_aliased_loads = ro_call_aliased_loads;
2085 return;
2088 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
2090 /* Add a VUSE for each call-clobbered variable. */
2091 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
2093 tree var = referenced_var (u);
2094 add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
2097 ro_call_aliased_loads = empty_ann.makes_aliased_loads;
2098 if (s_ann)
2099 s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
2101 /* Prepare empty cache vectors. */
2102 VEC_truncate (tree, ro_call_vuses, 0);
2104 /* Now fill the read-only call cache with the values that have been found. */
2105 for (i = opbuild_first (&build_vuses);
2106 i != OPBUILD_LAST;
2107 i = opbuild_next (&build_vuses, i))
2108 VEC_safe_push (tree, heap, ro_call_vuses,
2109 opbuild_elem_virtual (&build_vuses, i));
2111 gcc_assert (opbuild_num_elems (&build_vuses)
2112 == VEC_length (tree, ro_call_vuses));
2114 ssa_ro_call_cache_valid = true;
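/* Illustrative: a call to a pure function such as strlen may read global
   memory but never modifies it, so such a call receives only the VUSEs
   added above and no V_MAY_DEFs.  */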
2118 /* Scan the immediate_use list for VAR, making sure it is linked properly.
2119 Return true if there is a problem. */
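/* The immediate-use list is circular and doubly linked; the node embedded
   in the SSA_NAME itself acts as a sentinel whose 'use' field is NULL.
   Schematically:

     [sentinel] <-> [use 1] <-> [use 2] <-> ... <-> back to [sentinel]  */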
2121 bool
2122 verify_imm_links (FILE *f, tree var)
2124 use_operand_p ptr, prev, list;
2125 int count;
2127 gcc_assert (TREE_CODE (var) == SSA_NAME);
2129 list = &(SSA_NAME_IMM_USE_NODE (var));
2130 gcc_assert (list->use == NULL);
2132 if (list->prev == NULL)
2134 gcc_assert (list->next == NULL);
2135 return false;
2138 prev = list;
2139 count = 0;
2140 for (ptr = list->next; ptr != list; )
2142 if (prev != ptr->prev)
2143 goto error;
2145 if (ptr->use == NULL)
2146 goto error; /* Two roots, or a safe guard node. */
2147 else if (*(ptr->use) != var)
2148 goto error;
2150 prev = ptr;
2151 ptr = ptr->next;
2152 /* Avoid infinite loops. */
2153 if (count++ > 30000)
2154 goto error;
2157 /* Verify list in the other direction. */
2158 prev = list;
2159 for (ptr = list->prev; ptr != list; )
2161 if (prev != ptr->next)
2162 goto error;
2163 prev = ptr;
2164 ptr = ptr->prev;
2165 if (count-- < 0)
2166 goto error;
2169 if (count != 0)
2170 goto error;
2172 return false;
2174 error:
2175 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2177 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2178 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2180 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2181 (void *)ptr->use);
2182 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2183 fprintf (f, "\n");
2184 return true;
2188 /* Dump all the immediate uses of VAR to FILE. */
2190 void
2191 dump_immediate_uses_for (FILE *file, tree var)
2193 imm_use_iterator iter;
2194 use_operand_p use_p;
2196 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2198 print_generic_expr (file, var, TDF_SLIM);
2199 fprintf (file, " : -->");
2200 if (has_zero_uses (var))
2201 fprintf (file, " no uses.\n");
2202 else
2203 if (has_single_use (var))
2204 fprintf (file, " single use.\n");
2205 else
2206 fprintf (file, "%d uses.\n", num_imm_uses (var));
2208 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2210 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2211 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2212 else
2213 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2215 fprintf (file, "\n");
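/* Example output (illustrative, names hypothetical):

     a_2 : --> 2 uses.
     b_3 = a_2 + 1;
     return a_2;  */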
2218 /* Dump all the immediate uses to FILE. */
2220 void
2221 dump_immediate_uses (FILE *file)
2223 tree var;
2224 unsigned int x;
2226 fprintf (file, "Immediate_uses: \n\n");
2227 for (x = 1; x < num_ssa_names; x++)
2229 var = ssa_name(x);
2230 if (!var)
2231 continue;
2232 dump_immediate_uses_for (file, var);
2237 /* Dump def-use edges on stderr. */
2239 void
2240 debug_immediate_uses (void)
2242 dump_immediate_uses (stderr);
2245 /* Dump def-use edges for VAR on stderr. */
2247 void
2248 debug_immediate_uses_for (tree var)
2250 dump_immediate_uses_for (stderr, var);
2252 #include "gt-tree-ssa-operands.h"