1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
53 The operand tree is then parsed by the various get_* routines which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had an SSA_NAME version of a
72 variable, and that same variable occurs in the new operand vector, then
73 the new cache vector will also get the same SSA_NAME.
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'. */
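/* Illustrative example (not taken from the original sources; all names are
   hypothetical): for a statement

        *p_1 = b_3 + 1;

   where the pointer 'p_1' may point to the global 'g', the parser records
   real USEs of 'b_3' and 'p_1', a virtual V_MAY_DEF of 'g' (the store
   through 'p_1' may or may not modify it), and no real DEF at all.  A plain
   scalar assignment such as 'a = b_3 + 1' instead produces a real DEF of
   'a' alongside the real USE of 'b_3'.  */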
82 /* Flags to describe operand properties in helpers. */
84 /* By default, operands are loaded. */
85 #define opf_none 0
87 /* Operand is the target of an assignment expression or a
88 call-clobbered variable */
89 #define opf_is_def (1 << 0)
91 /* Operand is the target of an assignment expression. */
92 #define opf_kill_def (1 << 1)
94 /* No virtual operands should be created in the expression. This is used
95 when traversing ADDR_EXPR nodes which have different semantics than
96 other expressions. Inside an ADDR_EXPR node, the only operands that we
97 need to consider are indices into arrays. For instance, &a.b[i] should
98 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
99 VUSE for 'b'. */
100 #define opf_no_vops (1 << 2)
102 /* Operand is a "non-specific" kill for call-clobbers and such. This is used
103 to distinguish "reset the world" events from explicit MODIFY_EXPRs. */
104 #define opf_non_specific (1 << 3)
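/* A hedged illustration of how these flags combine (see parse_ssa_operands
   below): for 'x = y' the left-hand side is scanned with
   opf_is_def | opf_kill_def, because the whole of 'x' is overwritten, while
   for 'x[i] = y' only opf_is_def is passed, since just one element of 'x'
   changes and the store must not "kill" the remaining contents.  */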
106 /* Points to the function declaration that the cache is valid for
107 (if any). */
108 tree ssa_call_clobbered_cache_valid_for;
109 tree ssa_ro_call_cache_valid_for;
111 /* These arrays are the cached operand vectors for call clobbered calls. */
112 static VEC(tree,heap) *clobbered_v_may_defs;
113 static VEC(tree,heap) *clobbered_vuses;
114 static VEC(tree,heap) *ro_call_vuses;
115 static bool clobbered_aliased_loads;
116 static bool clobbered_aliased_stores;
117 static bool ro_call_aliased_loads;
119 static void get_expr_operands (tree, tree *, int);
120 static void get_asm_expr_operands (tree);
121 static void get_indirect_ref_operands (tree, tree, int);
122 static void get_tmr_operands (tree, tree, int);
123 static void get_call_expr_operands (tree, tree);
124 static inline void append_def (tree *);
125 static inline void append_use (tree *);
126 static void append_v_may_def (tree);
127 static void append_v_must_def (tree);
128 static void add_call_clobber_ops (tree, tree);
129 static void add_call_read_ops (tree);
130 static void add_stmt_operand (tree *, stmt_ann_t, int);
131 static void build_ssa_operands (tree stmt);
134 /* Return the DECL_UID of the base variable of T. */
136 static inline unsigned
137 get_name_decl (tree t)
139 if (TREE_CODE (t) != SSA_NAME)
140 return DECL_UID (t);
141 else
142 return DECL_UID (SSA_NAME_VAR (t));
145 /* Comparison function for qsort used in operand_build_sort_virtual. */
147 static int
148 operand_build_cmp (const void *p, const void *q)
150 tree e1 = *((const tree *)p);
151 tree e2 = *((const tree *)q);
152 unsigned int u1,u2;
154 u1 = get_name_decl (e1);
155 u2 = get_name_decl (e2);
157 /* We want to sort in ascending order. They can never be equal. */
158 #ifdef ENABLE_CHECKING
159 gcc_assert (u1 != u2);
160 #endif
161 return (u1 > u2 ? 1 : -1);
164 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
166 static inline void
167 operand_build_sort_virtual (VEC(tree,heap) *list)
169 int num = VEC_length (tree, list);
170 if (num < 2)
171 return;
172 if (num == 2)
174 if (get_name_decl (VEC_index (tree, list, 0))
175 > get_name_decl (VEC_index (tree, list, 1)))
177 /* Swap elements if in the wrong order. */
178 tree tmp = VEC_index (tree, list, 0);
179 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
180 VEC_replace (tree, list, 1, tmp);
182 return;
184 /* There are 3 or more elements, call qsort. */
185 qsort (VEC_address (tree, list),
186 VEC_length (tree, list),
187 sizeof (tree),
188 operand_build_cmp);
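/* A small worked example (illustrative only): a build vector holding
   variables whose DECL_UIDs are { 7, 3, 9 } leaves the routine above
   ordered as { 3, 7, 9 }.  Keeping virtual operands in DECL_UID order gives
   every statement a canonical layout, which lets the finalization routines
   match a rebuilt vector against the existing one in a single linear
   walk.  */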
193 /* Return true if the ssa operands cache is active. */
195 bool
196 ssa_operands_active (void)
198 return cfun->ssa && cfun->ssa->ops_active;
202 /* Initialize the operand cache routines. */
204 void
205 init_ssa_operands (void)
207 build_defs = VEC_alloc (tree, heap, 5);
208 build_uses = VEC_alloc (tree, heap, 10);
209 build_vuses = VEC_alloc (tree, heap, 25);
210 build_v_may_defs = VEC_alloc (tree, heap, 25);
211 build_v_must_defs = VEC_alloc (tree, heap, 25);
213 gcc_assert (operand_memory == NULL);
214 operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
215 cfun->ssa->ops_active = true;
219 /* Dispose of anything required by the operand routines. */
221 void
222 fini_ssa_operands (void)
224 struct ssa_operand_memory_d *ptr;
225 VEC_free (tree, heap, build_defs);
226 VEC_free (tree, heap, build_uses);
227 VEC_free (tree, heap, build_v_must_defs);
228 VEC_free (tree, heap, build_v_may_defs);
229 VEC_free (tree, heap, build_vuses);
230 free_defs = NULL;
231 free_uses = NULL;
232 free_vuses = NULL;
233 free_maydefs = NULL;
234 free_mustdefs = NULL;
235 ssa_call_clobbered_cache_valid_for = NULL;
236 ssa_ro_call_cache_valid_for = NULL;
237 while ((ptr = operand_memory) != NULL)
239 operand_memory = operand_memory->next;
240 ggc_free (ptr);
243 VEC_free (tree, heap, clobbered_v_may_defs);
244 VEC_free (tree, heap, clobbered_vuses);
245 VEC_free (tree, heap, ro_call_vuses);
246 cfun->ssa->ops_active = false;
250 /* Return SIZE bytes of memory for operands, carved from larger allocated chunks. */
252 static inline void *
253 ssa_operand_alloc (unsigned size)
255 char *ptr;
256 if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
258 struct ssa_operand_memory_d *ptr;
259 ptr = ggc_alloc (sizeof (struct ssa_operand_memory_d));
260 ptr->next = operand_memory;
261 operand_memory = ptr;
262 operand_memory_index = 0;
264 ptr = &(operand_memory->mem[operand_memory_index]);
265 operand_memory_index += size;
266 return ptr;
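/* A worked example of the bump-pointer scheme above (the numbers are made
   up): if SSA_OPERAND_MEMORY_SIZE were 2048 and operand_memory_index stood
   at 2040, a request for 24 bytes would not fit, so a fresh
   ssa_operand_memory_d chunk is pushed onto the operand_memory list, the
   index is reset to 0, and the 24 bytes are carved from the new chunk.
   Individual operands are never freed; fini_ssa_operands () releases the
   chunks wholesale via ggc_free.  */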
270 /* Make sure PTR is in the correct immediate use list. Since uses are simply
271 pointers into the stmt TREE, there is no way of telling if anyone has
272 changed what this pointer points to via TREE_OPERAND (exp, 0) = <...>.
273 The contents are different, but the pointer is still the same. This
274 routine will check to make sure PTR is in the correct list, and if it isn't
275 put it in the correct list. We cannot simply check the previous node
276 because all nodes in the same stmt might have been changed. */
278 static inline void
279 correct_use_link (use_operand_p ptr, tree stmt)
281 use_operand_p prev;
282 tree root;
284 /* Fold_stmt () may have changed the stmt pointers. */
285 if (ptr->stmt != stmt)
286 ptr->stmt = stmt;
288 prev = ptr->prev;
289 if (prev)
291 /* Find the root element, making sure we skip any safe iterators. */
292 while (prev->use != NULL || prev->stmt == NULL)
293 prev = prev->prev;
295 /* Get the ssa_name of the list the node is in. */
296 root = prev->stmt;
297 /* If it's the right list, simply return. */
298 if (root == *(ptr->use))
299 return;
301 /* It's in the wrong list if we reach here. */
302 delink_imm_use (ptr);
303 link_imm_use (ptr, *(ptr->use));
307 /* This routine makes sure that PTR is in an immediate use list, and makes
308 sure the stmt pointer is set to the current stmt. Virtual uses do not need
309 the overhead of correct_use_link since they cannot be directly manipulated
310 like a real use can be. (They don't exist in the TREE_OPERAND nodes.) */
311 static inline void
312 set_virtual_use_link (use_operand_p ptr, tree stmt)
314 /* Fold_stmt () may have changed the stmt pointers. */
315 if (ptr->stmt != stmt)
316 ptr->stmt = stmt;
318 /* If this use isn't in a list, add it to the correct list. */
319 if (!ptr->prev)
320 link_imm_use (ptr, *(ptr->use));
325 #define FINALIZE_OPBUILD build_defs
326 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
327 build_defs, (I))
328 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
329 build_defs, (I))
330 #define FINALIZE_FUNC finalize_ssa_def_ops
331 #define FINALIZE_ALLOC alloc_def
332 #define FINALIZE_FREE free_defs
333 #define FINALIZE_TYPE struct def_optype_d
334 #define FINALIZE_ELEM(PTR) ((PTR)->def_ptr)
335 #define FINALIZE_OPS DEF_OPS
336 #define FINALIZE_BASE(VAR) VAR
337 #define FINALIZE_BASE_TYPE tree *
338 #define FINALIZE_BASE_ZERO NULL
339 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL)
340 #include "tree-ssa-opfinalize.h"
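/* The FINALIZE_* macros above plus the #include of tree-ssa-opfinalize.h
   act as a poor man's template.  Summarizing the mechanism (the exact body
   lives in tree-ssa-opfinalize.h): the header expands into a function named
   FINALIZE_FUNC (here finalize_ssa_def_ops) which walks the
   FINALIZE_OPBUILD vector, grabs a FINALIZE_TYPE node from the
   FINALIZE_FREE list (or allocates one with FINALIZE_ALLOC), fills it in
   with FINALIZE_INITIALIZE, and chains the result onto the statement
   through FINALIZE_OPS, reusing nodes from the old operand list when the
   operands did not change.  The same header is re-included below with
   different macro settings for uses, v_may_defs, vuses and v_must_defs.  */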
343 /* This routine will create stmt operands for STMT from the def build list. */
345 static void
346 finalize_ssa_defs (tree stmt)
348 unsigned int num = VEC_length (tree, build_defs);
349 /* There should only be a single real definition per assignment. */
350 gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
352 /* If there is an old list, often the new list is identical, or close, so
353 find the elements at the beginning that are the same as the vector. */
355 finalize_ssa_def_ops (stmt);
356 VEC_truncate (tree, build_defs, 0);
359 #define FINALIZE_OPBUILD build_uses
360 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
361 build_uses, (I))
362 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
363 build_uses, (I))
364 #define FINALIZE_FUNC finalize_ssa_use_ops
365 #define FINALIZE_ALLOC alloc_use
366 #define FINALIZE_FREE free_uses
367 #define FINALIZE_TYPE struct use_optype_d
368 #define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use)
369 #define FINALIZE_OPS USE_OPS
370 #define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR)
371 #define FINALIZE_CORRECT_USE correct_use_link
372 #define FINALIZE_BASE(VAR) VAR
373 #define FINALIZE_BASE_TYPE tree *
374 #define FINALIZE_BASE_ZERO NULL
375 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
376 (PTR)->use_ptr.use = (VAL); \
377 link_imm_use_stmt (&((PTR)->use_ptr), \
378 *(VAL), (STMT))
379 #include "tree-ssa-opfinalize.h"
381 /* Create the USE operand vector for STMT from the build_uses vector. */
383 static void
384 finalize_ssa_uses (tree stmt)
386 #ifdef ENABLE_CHECKING
388 unsigned x;
389 unsigned num = VEC_length (tree, build_uses);
391 /* If the pointer to the operand is the statement itself, something is
392 wrong. It means that we are pointing to a local variable (the
393 initial call to get_stmt_operands does not pass a pointer to a
394 statement). */
395 for (x = 0; x < num; x++)
396 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
398 #endif
399 finalize_ssa_use_ops (stmt);
400 VEC_truncate (tree, build_uses, 0);
404 /* Create the V_MAY_DEF operand vector for STMT. */
405 #define FINALIZE_OPBUILD build_v_may_defs
406 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_may_defs, (I))
407 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
408 build_v_may_defs, (I)))
409 #define FINALIZE_FUNC finalize_ssa_v_may_def_ops
410 #define FINALIZE_ALLOC alloc_maydef
411 #define FINALIZE_FREE free_maydefs
412 #define FINALIZE_TYPE struct maydef_optype_d
413 #define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR)
414 #define FINALIZE_OPS MAYDEF_OPS
415 #define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR)
416 #define FINALIZE_CORRECT_USE set_virtual_use_link
417 #define FINALIZE_BASE_ZERO 0
418 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
419 #define FINALIZE_BASE_TYPE unsigned
420 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
421 (PTR)->def_var = (VAL); \
422 (PTR)->use_var = (VAL); \
423 (PTR)->use_ptr.use = &((PTR)->use_var); \
424 link_imm_use_stmt (&((PTR)->use_ptr), \
425 (VAL), (STMT))
426 #include "tree-ssa-opfinalize.h"
429 static void
430 finalize_ssa_v_may_defs (tree stmt)
432 finalize_ssa_v_may_def_ops (stmt);
436 /* Clear the in_list bits and empty the build array for v_may_defs. */
438 static inline void
439 cleanup_v_may_defs (void)
441 unsigned x, num;
442 num = VEC_length (tree, build_v_may_defs);
444 for (x = 0; x < num; x++)
446 tree t = VEC_index (tree, build_v_may_defs, x);
447 if (TREE_CODE (t) != SSA_NAME)
449 var_ann_t ann = var_ann (t);
450 ann->in_v_may_def_list = 0;
453 VEC_truncate (tree, build_v_may_defs, 0);
457 #define FINALIZE_OPBUILD build_vuses
458 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_vuses, (I))
459 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
460 build_vuses, (I)))
461 #define FINALIZE_FUNC finalize_ssa_vuse_ops
462 #define FINALIZE_ALLOC alloc_vuse
463 #define FINALIZE_FREE free_vuses
464 #define FINALIZE_TYPE struct vuse_optype_d
465 #define FINALIZE_ELEM(PTR) VUSE_OP (PTR)
466 #define FINALIZE_OPS VUSE_OPS
467 #define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR)
468 #define FINALIZE_CORRECT_USE set_virtual_use_link
469 #define FINALIZE_BASE_ZERO 0
470 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
471 #define FINALIZE_BASE_TYPE unsigned
472 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
473 (PTR)->use_var = (VAL); \
474 (PTR)->use_ptr.use = &((PTR)->use_var); \
475 link_imm_use_stmt (&((PTR)->use_ptr), \
476 (VAL), (STMT))
477 #include "tree-ssa-opfinalize.h"
480 /* Create the VUSE operand vector for STMT. */
482 static void
483 finalize_ssa_vuses (tree stmt)
485 unsigned num, num_v_may_defs;
486 unsigned vuse_index;
488 /* Remove superfluous VUSE operands. If the statement already has a
489 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
490 needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
491 suppose that variable 'a' is aliased:
493 # VUSE <a_2>
494 # a_3 = V_MAY_DEF <a_2>
495 a = a + 1;
497 The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
498 operation. */
500 num = VEC_length (tree, build_vuses);
501 num_v_may_defs = VEC_length (tree, build_v_may_defs);
503 if (num > 0 && num_v_may_defs > 0)
505 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
507 tree vuse;
508 vuse = VEC_index (tree, build_vuses, vuse_index);
509 if (TREE_CODE (vuse) != SSA_NAME)
511 var_ann_t ann = var_ann (vuse);
512 ann->in_vuse_list = 0;
513 if (ann->in_v_may_def_list)
515 VEC_ordered_remove (tree, build_vuses, vuse_index);
516 continue;
519 vuse_index++;
522 else
523 /* Clear out the in_list bits. */
524 for (vuse_index = 0;
525 vuse_index < VEC_length (tree, build_vuses);
526 vuse_index++)
528 tree t = VEC_index (tree, build_vuses, vuse_index);
529 if (TREE_CODE (t) != SSA_NAME)
531 var_ann_t ann = var_ann (t);
532 ann->in_vuse_list = 0;
536 finalize_ssa_vuse_ops (stmt);
537 /* The v_may_def build vector wasn't cleaned up because we needed it. */
538 cleanup_v_may_defs ();
540 /* Free the vuses build vector. */
541 VEC_truncate (tree, build_vuses, 0);
545 /* Create the V_MUST_DEF operand vector for STMT. */
547 #define FINALIZE_OPBUILD build_v_must_defs
548 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_must_defs, (I))
549 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
550 build_v_must_defs, (I)))
551 #define FINALIZE_FUNC finalize_ssa_v_must_def_ops
552 #define FINALIZE_ALLOC alloc_mustdef
553 #define FINALIZE_FREE free_mustdefs
554 #define FINALIZE_TYPE struct mustdef_optype_d
555 #define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR)
556 #define FINALIZE_OPS MUSTDEF_OPS
557 #define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR)
558 #define FINALIZE_CORRECT_USE set_virtual_use_link
559 #define FINALIZE_BASE_ZERO 0
560 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
561 #define FINALIZE_BASE_TYPE unsigned
562 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
563 (PTR)->def_var = (VAL); \
564 (PTR)->kill_var = (VAL); \
565 (PTR)->use_ptr.use = &((PTR)->kill_var);\
566 link_imm_use_stmt (&((PTR)->use_ptr), \
567 (VAL), (STMT))
568 #include "tree-ssa-opfinalize.h"
571 static void
572 finalize_ssa_v_must_defs (tree stmt)
574 /* In the presence of subvars, there may be more than one V_MUST_DEF per
575 statement (one for each subvar). It is a bit expensive to verify that
576 all must-defs in a statement belong to subvars if there is more than one
577 MUST-def, so we don't do it. Suffice to say, if you reach here without
578 having subvars, and have num >1, you have hit a bug. */
580 finalize_ssa_v_must_def_ops (stmt);
581 VEC_truncate (tree, build_v_must_defs, 0);
585 /* Finalize all the build vectors and attach the new operands to STMT. */
587 static inline void
588 finalize_ssa_stmt_operands (tree stmt)
590 finalize_ssa_defs (stmt);
591 finalize_ssa_uses (stmt);
592 finalize_ssa_v_must_defs (stmt);
593 finalize_ssa_v_may_defs (stmt);
594 finalize_ssa_vuses (stmt);
598 /* Start the process of building up the operand vectors. */
600 static inline void
601 start_ssa_stmt_operands (void)
603 gcc_assert (VEC_length (tree, build_defs) == 0);
604 gcc_assert (VEC_length (tree, build_uses) == 0);
605 gcc_assert (VEC_length (tree, build_vuses) == 0);
606 gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
607 gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
611 /* Add DEF_P to the list of pointers to operands. */
613 static inline void
614 append_def (tree *def_p)
616 VEC_safe_push (tree, heap, build_defs, (tree)def_p);
620 /* Add USE_P to the list of pointers to operands. */
622 static inline void
623 append_use (tree *use_p)
625 VEC_safe_push (tree, heap, build_uses, (tree)use_p);
629 /* Add a new virtual may def for variable VAR to the build array. */
631 static inline void
632 append_v_may_def (tree var)
634 if (TREE_CODE (var) != SSA_NAME)
636 var_ann_t ann = get_var_ann (var);
638 /* Don't allow duplicate entries. */
639 if (ann->in_v_may_def_list)
640 return;
641 ann->in_v_may_def_list = 1;
644 VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
648 /* Add VAR to the list of virtual uses. */
650 static inline void
651 append_vuse (tree var)
654 /* Don't allow duplicate entries. */
655 if (TREE_CODE (var) != SSA_NAME)
657 var_ann_t ann = get_var_ann (var);
659 if (ann->in_vuse_list || ann->in_v_may_def_list)
660 return;
661 ann->in_vuse_list = 1;
664 VEC_safe_push (tree, heap, build_vuses, (tree)var);
668 /* Add VAR to the list of virtual must definitions. */
670 static inline void
671 append_v_must_def (tree var)
673 unsigned i;
675 /* Don't allow duplicate entries. */
676 for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
677 if (var == VEC_index (tree, build_v_must_defs, i))
678 return;
680 VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
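/* A note on the duplicate checks in the virtual append routines above (a
   reading of the code, not new behaviour): append_v_may_def and append_vuse
   rely on the in_v_may_def_list / in_vuse_list bits kept in the variable
   annotation, which makes each append O(1) but requires the bits to be
   cleared afterwards (cleanup_v_may_defs and finalize_ssa_vuses take care
   of that); append_v_must_def simply scans build_v_must_defs linearly,
   which is cheap because a statement rarely carries more than a few
   V_MUST_DEFs.  */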
684 /* Parse STMT looking for operands. When finished, the various build_*
686 operand vectors will have potential operands in them. */
688 static void
689 parse_ssa_operands (tree stmt)
691 enum tree_code code;
693 code = TREE_CODE (stmt);
694 switch (code)
696 case MODIFY_EXPR:
697 /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
698 either only part of LHS is modified or if the RHS might throw,
699 otherwise, use V_MUST_DEF.
701 ??? If it might throw, we should represent somehow that it is killed
702 on the fallthrough path. */
704 tree lhs = TREE_OPERAND (stmt, 0);
705 int lhs_flags = opf_is_def;
707 get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
709 /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
710 or not the entire LHS is modified; that depends on what's
711 inside the VIEW_CONVERT_EXPR. */
712 if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
713 lhs = TREE_OPERAND (lhs, 0);
715 if (TREE_CODE (lhs) != ARRAY_REF
716 && TREE_CODE (lhs) != ARRAY_RANGE_REF
717 && TREE_CODE (lhs) != BIT_FIELD_REF
718 && TREE_CODE (lhs) != REALPART_EXPR
719 && TREE_CODE (lhs) != IMAGPART_EXPR)
720 lhs_flags |= opf_kill_def;
722 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
724 break;
726 case COND_EXPR:
727 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
728 break;
730 case SWITCH_EXPR:
731 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
732 break;
734 case ASM_EXPR:
735 get_asm_expr_operands (stmt);
736 break;
738 case RETURN_EXPR:
739 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
740 break;
742 case GOTO_EXPR:
743 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
744 break;
746 case LABEL_EXPR:
747 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
748 break;
750 /* These nodes contain no variable references. */
751 case BIND_EXPR:
752 case CASE_LABEL_EXPR:
753 case TRY_CATCH_EXPR:
754 case TRY_FINALLY_EXPR:
755 case EH_FILTER_EXPR:
756 case CATCH_EXPR:
757 case RESX_EXPR:
758 break;
760 default:
761 /* Notice that if get_expr_operands tries to use &STMT as the operand
762 pointer (which may only happen for USE operands), we will fail in
763 append_use. This default will handle statements like empty
764 statements, or CALL_EXPRs that may appear on the RHS of a statement
765 or as statements themselves. */
766 get_expr_operands (stmt, &stmt, opf_none);
767 break;
771 /* Create an operands cache for STMT and store it in the statement's
772 annotation. The statement's flags (volatile operands, aliased loads
773 and stores) are recomputed while the statement is scanned.
775 If STMT already had an operand cache, the finalization routines reuse
776 any operand vector whose contents did not change, so rebuilding the
777 cache after a small modification to the statement is cheap. */
793 static void
794 build_ssa_operands (tree stmt)
796 stmt_ann_t ann = get_stmt_ann (stmt);
798 /* Initially assume that the statement has no volatile operands, nor
799 makes aliased loads or stores. */
800 if (ann)
802 ann->has_volatile_ops = false;
803 ann->makes_aliased_stores = false;
804 ann->makes_aliased_loads = false;
807 start_ssa_stmt_operands ();
809 parse_ssa_operands (stmt);
810 operand_build_sort_virtual (build_vuses);
811 operand_build_sort_virtual (build_v_may_defs);
812 operand_build_sort_virtual (build_v_must_defs);
814 finalize_ssa_stmt_operands (stmt);
818 /* Free any operands vectors in OPS. */
819 void
820 free_ssa_operands (stmt_operands_p ops)
822 ops->def_ops = NULL;
823 ops->use_ops = NULL;
824 ops->maydef_ops = NULL;
825 ops->mustdef_ops = NULL;
826 ops->vuse_ops = NULL;
830 /* Get the operands of statement STMT. Note that repeated calls to
831 get_stmt_operands for the same statement will do nothing until the
832 statement is marked modified by a call to mark_stmt_modified(). */
834 void
835 update_stmt_operands (tree stmt)
837 stmt_ann_t ann = get_stmt_ann (stmt);
838 /* If get_stmt_operands is called before SSA is initialized, don't
839 do anything. */
840 if (!ssa_operands_active ())
841 return;
842 /* The optimizers cannot handle statements that are nothing but a
843 _DECL. This indicates a bug in the gimplifier. */
844 gcc_assert (!SSA_VAR_P (stmt));
846 gcc_assert (ann->modified);
848 timevar_push (TV_TREE_OPS);
850 build_ssa_operands (stmt);
852 /* Clear the modified bit for STMT. Subsequent calls to
853 get_stmt_operands for this statement will do nothing until the
854 statement is marked modified by a call to mark_stmt_modified(). */
855 ann->modified = 0;
857 timevar_pop (TV_TREE_OPS);
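/* A minimal usage sketch (an assumed caller, not part of this file): after
   rewriting part of a statement, a pass would typically do

        mark_stmt_modified (stmt);
        update_stmt_operands (stmt);
        FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
          process_use (op);

   where 'op' is a tree, 'iter' is an ssa_op_iter and process_use () is a
   placeholder for whatever the pass does with each use.  Callers commonly
   reach this routine through the update_stmt () wrapper, which performs
   the modified check itself.  */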
861 /* Copies virtual operands from SRC to DEST. */
863 void
864 copy_virtual_operands (tree dest, tree src)
866 tree t;
867 ssa_op_iter iter, old_iter;
868 use_operand_p use_p, u2;
869 def_operand_p def_p, d2;
871 build_ssa_operands (dest);
873 /* Copy all the virtual fields. */
874 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
875 append_vuse (t);
876 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
877 append_v_may_def (t);
878 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
879 append_v_must_def (t);
881 if (VEC_length (tree, build_vuses) == 0
882 && VEC_length (tree, build_v_may_defs) == 0
883 && VEC_length (tree, build_v_must_defs) == 0)
884 return;
886 /* Now commit the virtual operands to this stmt. */
887 finalize_ssa_v_must_defs (dest);
888 finalize_ssa_v_may_defs (dest);
889 finalize_ssa_vuses (dest);
891 /* Finally, set the fields to the same values as the originals. */
894 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
895 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
897 gcc_assert (!op_iter_done (&old_iter));
898 SET_USE (use_p, t);
899 t = op_iter_next_tree (&old_iter);
901 gcc_assert (op_iter_done (&old_iter));
903 op_iter_init_maydef (&old_iter, src, &u2, &d2);
904 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
906 gcc_assert (!op_iter_done (&old_iter));
907 SET_USE (use_p, USE_FROM_PTR (u2));
908 SET_DEF (def_p, DEF_FROM_PTR (d2));
909 op_iter_next_maymustdef (&u2, &d2, &old_iter);
911 gcc_assert (op_iter_done (&old_iter));
913 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
914 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
916 gcc_assert (!op_iter_done (&old_iter));
917 SET_USE (use_p, USE_FROM_PTR (u2));
918 SET_DEF (def_p, DEF_FROM_PTR (d2));
919 op_iter_next_maymustdef (&u2, &d2, &old_iter);
921 gcc_assert (op_iter_done (&old_iter));
926 /* Specifically for use in DOM's expression analysis. Given a store, we
927 create an artificial stmt which looks like a load from the store; this can
928 be used to eliminate redundant loads. OLD_STMT is the store stmt, and
929 NEW_STMT is the new load which represents a load of the values stored. */
932 void
933 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
935 stmt_ann_t ann;
936 tree op;
937 ssa_op_iter iter;
938 use_operand_p use_p;
939 unsigned x;
941 ann = get_stmt_ann (new_stmt);
943 /* Process the stmt looking for operands. */
944 start_ssa_stmt_operands ();
945 parse_ssa_operands (new_stmt);
947 for (x = 0; x < VEC_length (tree, build_vuses); x++)
949 tree t = VEC_index (tree, build_vuses, x);
950 if (TREE_CODE (t) != SSA_NAME)
952 var_ann_t ann = var_ann (t);
953 ann->in_vuse_list = 0;
957 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
959 tree t = VEC_index (tree, build_v_may_defs, x);
960 if (TREE_CODE (t) != SSA_NAME)
962 var_ann_t ann = var_ann (t);
963 ann->in_v_may_def_list = 0;
966 /* Remove any virtual operands that were found. */
967 VEC_truncate (tree, build_v_may_defs, 0);
968 VEC_truncate (tree, build_v_must_defs, 0);
969 VEC_truncate (tree, build_vuses, 0);
971 /* For each VDEF on the original statement, we want to create a
972 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
973 statement. */
974 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
975 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
976 append_vuse (op);
978 /* Now build the operands for this new stmt. */
979 finalize_ssa_stmt_operands (new_stmt);
981 /* All uses in this fake stmt must not be in the immediate use lists. */
982 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
983 delink_imm_use (use_p);
986 void
987 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
989 tree op0, op1;
990 op0 = *exp0;
991 op1 = *exp1;
993 /* If the operand cache is active, attempt to preserve the relative positions
994 of these two operands in their respective immediate use lists. */
995 if (ssa_operands_active () && op0 != op1)
997 use_optype_p use0, use1, ptr;
998 use0 = use1 = NULL;
999 /* Find the 2 operands in the cache, if they are there. */
1000 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1001 if (USE_OP_PTR (ptr)->use == exp0)
1003 use0 = ptr;
1004 break;
1006 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1007 if (USE_OP_PTR (ptr)->use == exp1)
1009 use1 = ptr;
1010 break;
1012 /* If the two uses don't both have operand entries, there isn't much we can
1013 do at this point. Presumably we don't need to worry about it. */
1014 if (use0 && use1)
1016 tree *tmp = USE_OP_PTR (use1)->use;
1017 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
1018 USE_OP_PTR (use0)->use = tmp;
1022 /* Now swap the data. */
1023 *exp0 = op1;
1024 *exp1 = op0;
1028 /* Recursively scan the expression pointed to by EXPR_P in statement STMT.
1029 FLAGS is a bitmask of OPF_* constants modifying how to interpret the
1030 operands found. */
1032 static void
1033 get_expr_operands (tree stmt, tree *expr_p, int flags)
1035 enum tree_code code;
1036 enum tree_code_class class;
1037 tree expr = *expr_p;
1038 stmt_ann_t s_ann = stmt_ann (stmt);
1040 if (expr == NULL)
1041 return;
1043 code = TREE_CODE (expr);
1044 class = TREE_CODE_CLASS (code);
1046 switch (code)
1048 case ADDR_EXPR:
1049 /* We could have the address of a component, array member,
1050 etc which has interesting variable references. */
1051 /* Taking the address of a variable does not represent a
1052 reference to it, but the fact that the stmt takes its address will be
1053 of interest to some passes (e.g. alias resolution). */
1054 add_stmt_operand (expr_p, s_ann, 0);
1056 /* If the address is invariant, there may be no interesting variable
1057 references inside. */
1058 if (is_gimple_min_invariant (expr))
1059 return;
1061 /* There should be no VUSEs created, since the referenced objects are
1062 not really accessed. The only operands that we should find here
1063 are ARRAY_REF indices which will always be real operands (GIMPLE
1064 does not allow non-registers as array indices). */
1065 flags |= opf_no_vops;
1067 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1068 return;
1070 case SSA_NAME:
1071 case VAR_DECL:
1072 case PARM_DECL:
1073 case RESULT_DECL:
1074 case CONST_DECL:
1076 subvar_t svars;
1078 /* Add the subvars for a variable if it has subvars, to DEFS or USES.
1079 Otherwise, add the variable itself.
1080 Whether it goes to USES or DEFS depends on the operand flags. */
1081 if (var_can_have_subvars (expr)
1082 && (svars = get_subvars_for_var (expr)))
1084 subvar_t sv;
1085 for (sv = svars; sv; sv = sv->next)
1086 add_stmt_operand (&sv->var, s_ann, flags);
1088 else
1090 add_stmt_operand (expr_p, s_ann, flags);
1092 return;
1094 case MISALIGNED_INDIRECT_REF:
1095 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1096 /* fall through */
1098 case ALIGN_INDIRECT_REF:
1099 case INDIRECT_REF:
1100 get_indirect_ref_operands (stmt, expr, flags);
1101 return;
1103 case TARGET_MEM_REF:
1104 get_tmr_operands (stmt, expr, flags);
1105 return;
1107 case ARRAY_REF:
1108 case ARRAY_RANGE_REF:
1109 /* Treat array references as references to the virtual variable
1110 representing the array. The virtual variable for an ARRAY_REF
1111 is the VAR_DECL for the array. */
1113 /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
1114 according to the value of IS_DEF. Recurse if the LHS of the
1115 ARRAY_REF node is not a regular variable. */
1116 if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
1117 add_stmt_operand (expr_p, s_ann, flags);
1118 else
1119 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1121 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1122 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1123 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1124 return;
1126 case COMPONENT_REF:
1127 case REALPART_EXPR:
1128 case IMAGPART_EXPR:
1130 tree ref;
1131 unsigned HOST_WIDE_INT offset, size;
1132 /* This component ref becomes an access to all of the subvariables
1133 it can touch, if we can determine that, but *NOT* the real one.
1134 If we can't determine which fields we could touch, the recursion
1135 will eventually get to a variable and add *all* of its subvars, or
1136 whatever is the minimum correct subset. */
1138 ref = okay_component_ref_for_subvars (expr, &offset, &size);
1139 if (ref)
1141 subvar_t svars = get_subvars_for_var (ref);
1142 subvar_t sv;
1143 for (sv = svars; sv; sv = sv->next)
1145 bool exact;
1146 if (overlap_subvar (offset, size, sv, &exact))
1148 int subvar_flags = flags;
1149 if (!exact)
1150 subvar_flags &= ~opf_kill_def;
1151 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1155 else
1156 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1157 flags & ~opf_kill_def);
1159 if (code == COMPONENT_REF)
1161 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1162 s_ann->has_volatile_ops = true;
1163 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1165 return;
1167 case WITH_SIZE_EXPR:
1168 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1169 and an rvalue reference to its second argument. */
1170 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1171 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1172 return;
1174 case CALL_EXPR:
1175 get_call_expr_operands (stmt, expr);
1176 return;
1178 case COND_EXPR:
1179 case VEC_COND_EXPR:
1180 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1181 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1182 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1183 return;
1185 case MODIFY_EXPR:
1187 int subflags;
1188 tree op;
1190 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1192 op = TREE_OPERAND (expr, 0);
1193 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1194 op = TREE_OPERAND (expr, 0);
1195 if (TREE_CODE (op) == ARRAY_REF
1196 || TREE_CODE (op) == ARRAY_RANGE_REF
1197 || TREE_CODE (op) == REALPART_EXPR
1198 || TREE_CODE (op) == IMAGPART_EXPR)
1199 subflags = opf_is_def;
1200 else
1201 subflags = opf_is_def | opf_kill_def;
1203 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
1204 return;
1207 case CONSTRUCTOR:
1209 /* General aggregate CONSTRUCTORs have been decomposed, but they
1210 are still in use as the COMPLEX_EXPR equivalent for vectors. */
1211 constructor_elt *ce;
1212 unsigned HOST_WIDE_INT idx;
1214 for (idx = 0;
1215 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
1216 idx++)
1217 get_expr_operands (stmt, &ce->value, opf_none);
1219 return;
1222 case TRUTH_NOT_EXPR:
1223 case BIT_FIELD_REF:
1224 case VIEW_CONVERT_EXPR:
1225 do_unary:
1226 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1227 return;
1229 case TRUTH_AND_EXPR:
1230 case TRUTH_OR_EXPR:
1231 case TRUTH_XOR_EXPR:
1232 case COMPOUND_EXPR:
1233 case OBJ_TYPE_REF:
1234 case ASSERT_EXPR:
1235 do_binary:
1237 tree op0 = TREE_OPERAND (expr, 0);
1238 tree op1 = TREE_OPERAND (expr, 1);
1240 /* If it would be profitable to swap the operands, then do so to
1241 canonicalize the statement, enabling better optimization.
1243 By placing canonicalization of such expressions here we
1244 transparently keep statements in canonical form, even
1245 when the statement is modified. */
1246 if (tree_swap_operands_p (op0, op1, false))
1248 /* For relationals we need to swap the operands
1249 and change the code. */
1250 if (code == LT_EXPR
1251 || code == GT_EXPR
1252 || code == LE_EXPR
1253 || code == GE_EXPR)
1255 TREE_SET_CODE (expr, swap_tree_comparison (code));
1256 swap_tree_operands (stmt,
1257 &TREE_OPERAND (expr, 0),
1258 &TREE_OPERAND (expr, 1));
1261 /* For a commutative operator we can just swap the operands. */
1262 else if (commutative_tree_code (code))
1264 swap_tree_operands (stmt,
1265 &TREE_OPERAND (expr, 0),
1266 &TREE_OPERAND (expr, 1));
1270 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1271 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1272 return;
1275 case REALIGN_LOAD_EXPR:
1277 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1278 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1279 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
1280 return;
1283 case BLOCK:
1284 case FUNCTION_DECL:
1285 case EXC_PTR_EXPR:
1286 case FILTER_EXPR:
1287 case LABEL_DECL:
1288 /* Expressions that make no memory references. */
1289 return;
1291 default:
1292 if (class == tcc_unary)
1293 goto do_unary;
1294 if (class == tcc_binary || class == tcc_comparison)
1295 goto do_binary;
1296 if (class == tcc_constant || class == tcc_type)
1297 return;
1300 /* If we get here, something has gone wrong. */
1301 #ifdef ENABLE_CHECKING
1302 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
1303 debug_tree (expr);
1304 fputs ("\n", stderr);
1305 internal_error ("internal error");
1306 #endif
1307 gcc_unreachable ();
1311 /* Scan the operands of ASM_EXPR statement STMT. */
1313 static void
1314 get_asm_expr_operands (tree stmt)
1316 stmt_ann_t s_ann = stmt_ann (stmt);
1317 int noutputs = list_length (ASM_OUTPUTS (stmt));
1318 const char **oconstraints
1319 = (const char **) alloca ((noutputs) * sizeof (const char *));
1320 int i;
1321 tree link;
1322 const char *constraint;
1323 bool allows_mem, allows_reg, is_inout;
1325 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1327 oconstraints[i] = constraint
1328 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1329 parse_output_constraint (&constraint, i, 0, 0,
1330 &allows_mem, &allows_reg, &is_inout);
1332 /* This should have been split in gimplify_asm_expr. */
1333 gcc_assert (!allows_reg || !is_inout);
1335 /* Memory operands are addressable. Note that STMT needs the
1336 address of this operand. */
1337 if (!allows_reg && allows_mem)
1339 tree t = get_base_address (TREE_VALUE (link));
1340 if (t && DECL_P (t) && s_ann)
1341 add_to_addressable_set (t, &s_ann->addresses_taken);
1344 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1347 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1349 constraint
1350 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1351 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1352 oconstraints, &allows_mem, &allows_reg);
1354 /* Memory operands are addressable. Note that STMT needs the
1355 address of this operand. */
1356 if (!allows_reg && allows_mem)
1358 tree t = get_base_address (TREE_VALUE (link));
1359 if (t && DECL_P (t) && s_ann)
1360 add_to_addressable_set (t, &s_ann->addresses_taken);
1363 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1367 /* Clobber memory for asm ("" : : : "memory"); */
1368 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1369 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1371 unsigned i;
1372 bitmap_iterator bi;
1374 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1375 decided to group them). */
1376 if (global_var)
1377 add_stmt_operand (&global_var, s_ann, opf_is_def);
1378 else
1379 EXECUTE_IF_SET_IN_BITMAP (cfun->ssa->call_clobbered_vars, 0, i, bi)
1381 tree var = referenced_var (i);
1382 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1385 /* Now clobber all addressables. */
1386 EXECUTE_IF_SET_IN_BITMAP (cfun->ssa->addressable_vars, 0, i, bi)
1388 tree var = referenced_var (i);
1390 /* Subvars are explicitly represented in this list, so
1391 we don't need the original to be added to the clobber
1392 ops, but the original *will* be in this list because
1393 we keep the addressability of the original
1394 variable up-to-date so we don't screw up the rest of
1395 the backend. */
1396 if (var_can_have_subvars (var)
1397 && get_subvars_for_var (var) != NULL)
1398 continue;
1400 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1403 break;
1407 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1408 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. */
1410 static void
1411 get_indirect_ref_operands (tree stmt, tree expr, int flags)
1413 tree *pptr = &TREE_OPERAND (expr, 0);
1414 tree ptr = *pptr;
1415 stmt_ann_t s_ann = stmt_ann (stmt);
1417 /* Stores into INDIRECT_REF operands are never killing definitions. */
1418 flags &= ~opf_kill_def;
1420 if (SSA_VAR_P (ptr))
1422 struct ptr_info_def *pi = NULL;
1424 /* If PTR has flow-sensitive points-to information, use it. */
1425 if (TREE_CODE (ptr) == SSA_NAME
1426 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1427 && pi->name_mem_tag)
1429 /* PTR has its own memory tag. Use it. */
1430 add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
1432 else
1434 /* If PTR is not an SSA_NAME or it doesn't have a name
1435 tag, use its type memory tag. */
1436 var_ann_t v_ann;
1438 /* If we are emitting debugging dumps, display a warning if
1439 PTR is an SSA_NAME with no flow-sensitive alias
1440 information. That means that we may need to compute
1441 aliasing again. */
1442 if (dump_file
1443 && TREE_CODE (ptr) == SSA_NAME
1444 && pi == NULL)
1446 fprintf (dump_file,
1447 "NOTE: no flow-sensitive alias info for ");
1448 print_generic_expr (dump_file, ptr, dump_flags);
1449 fprintf (dump_file, " in ");
1450 print_generic_stmt (dump_file, stmt, dump_flags);
1453 if (TREE_CODE (ptr) == SSA_NAME)
1454 ptr = SSA_NAME_VAR (ptr);
1455 v_ann = var_ann (ptr);
1456 if (v_ann->type_mem_tag)
1457 add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
1458 /* Aliasing information is missing; mark statement as volatile so we
1459 won't optimize it out too actively. */
1460 else if (s_ann && !aliases_computed_p && (flags & (opf_is_def | opf_kill_def)))
1461 s_ann->has_volatile_ops = true;
1465 /* If a constant is used as a pointer, we can't generate a real
1466 operand for it but we mark the statement volatile to prevent
1467 optimizations from messing things up. */
1468 else if (TREE_CODE (ptr) == INTEGER_CST)
1470 if (s_ann)
1471 s_ann->has_volatile_ops = true;
1472 return;
1475 /* Everything else *should* have been folded elsewhere, but users
1476 are smarter than we in finding ways to write invalid code. We
1477 cannot just assert here. If we were absolutely certain that we
1478 do handle all valid cases, then we could just do nothing here.
1479 That seems optimistic, so attempt to do something logical... */
1480 else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
1481 && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
1482 && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
1484 /* Make sure we know the object is addressable. */
1485 pptr = &TREE_OPERAND (ptr, 0);
1486 add_stmt_operand (pptr, s_ann, 0);
1488 /* Mark the object itself with a VUSE. */
1489 pptr = &TREE_OPERAND (*pptr, 0);
1490 get_expr_operands (stmt, pptr, flags);
1491 return;
1494 /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
1495 else
1496 gcc_unreachable ();
1498 /* Add a USE operand for the base pointer. */
1499 get_expr_operands (stmt, pptr, opf_none);
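/* Illustrative example (hypothetical names): for a load 'x_4 = *p_7', where
   p_7 carries flow-sensitive points-to information with name tag NMT.5, the
   code above adds a VUSE of NMT.5 plus a real USE of p_7.  For a store
   '*p_7 = x_4' the tag instead receives a V_MAY_DEF, never a V_MUST_DEF,
   since a store through the pointer is not guaranteed to overwrite every
   variable the tag stands for.  */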
1502 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1504 static void
1505 get_tmr_operands (tree stmt, tree expr, int flags)
1507 tree tag = TMR_TAG (expr);
1509 /* First record the real operands. */
1510 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1511 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1513 /* MEM_REFs should never be killing. */
1514 flags &= ~opf_kill_def;
1516 if (TMR_SYMBOL (expr))
1518 stmt_ann_t ann = stmt_ann (stmt);
1519 add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1522 if (tag)
1523 get_expr_operands (stmt, &tag, flags);
1524 else
1525 /* Something weird, so ensure that we will be careful. */
1526 stmt_ann (stmt)->has_volatile_ops = true;
1529 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1531 static void
1532 get_call_expr_operands (tree stmt, tree expr)
1534 tree op;
1535 int call_flags = call_expr_flags (expr);
1537 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1538 operands for all the symbols that have been found to be
1539 call-clobbered.
1541 Note that if aliases have not been computed, the global effects
1542 of calls will not be included in the SSA web. This is fine
1543 because no optimizer should run before aliases have been
1544 computed. By not bothering with virtual operands for CALL_EXPRs
1545 we avoid adding superfluous virtual operands, which can be a
1546 significant compile time sink (See PR 15855). */
1547 if (aliases_computed_p
1548 && !bitmap_empty_p (cfun->ssa->call_clobbered_vars)
1549 && !(call_flags & ECF_NOVOPS))
1551 /* A 'pure' or a 'const' function never call-clobbers anything.
1552 A 'noreturn' function might, but since we don't return anyway
1553 there is no point in recording that. */
1554 if (TREE_SIDE_EFFECTS (expr)
1555 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1556 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1557 else if (!(call_flags & ECF_CONST))
1558 add_call_read_ops (stmt);
1561 /* Find uses in the called function. */
1562 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1564 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1565 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1567 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
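/* Illustrative example (hypothetical code): once aliasing has been
   computed, a call such as 'foo (x)' to a function that is neither pure nor
   const receives a V_MAY_DEF for every call-clobbered variable (via
   add_call_clobber_ops), while 'n = strlen (s)', a pure call, only receives
   VUSEs of those variables (via add_call_read_ops), and a const call gets
   no virtual operands at all.  The argument 's' and the callee address are
   still scanned as ordinary real operands.  */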
1572 /* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
1573 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1574 the statement's real operands, otherwise it is added to virtual
1575 operands. */
1577 static void
1578 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1580 bool is_real_op;
1581 tree var, sym;
1582 var_ann_t v_ann;
1584 var = *var_p;
1585 STRIP_NOPS (var);
1587 /* If the operand is an ADDR_EXPR, add its operand to the list of
1588 variables that have had their address taken in this statement. */
1589 if (TREE_CODE (var) == ADDR_EXPR && s_ann)
1591 add_to_addressable_set (TREE_OPERAND (var, 0), &s_ann->addresses_taken);
1592 return;
1595 /* If the original variable is not a scalar, it will be added to the list
1596 of virtual operands. In that case, use its base symbol as the virtual
1597 variable representing it. */
1598 is_real_op = is_gimple_reg (var);
1599 if (!is_real_op && !DECL_P (var))
1600 var = get_virtual_var (var);
1602 /* If VAR is not a variable that we care to optimize, do nothing. */
1603 if (var == NULL_TREE || !SSA_VAR_P (var))
1604 return;
1606 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1607 v_ann = var_ann (sym);
1609 /* Mark statements with volatile operands. Optimizers should back
1610 off from statements having volatile operands. */
1611 if (TREE_THIS_VOLATILE (sym) && s_ann)
1612 s_ann->has_volatile_ops = true;
1614 /* If the variable cannot be modified and this is a V_MAY_DEF change
1615 it into a VUSE. This happens when read-only variables are marked
1616 call-clobbered and/or aliased to writable variables. So we only
1617 do this for non-specific stores.
1619 Note that if this is a specific store, i.e. associated with a
1620 modify_expr, then we can't suppress the V_DEF, lest we run into
1621 validation problems.
1623 This can happen when programs cast away const, leaving us with a
1624 store to read-only memory. If the statement is actually executed
1625 at runtime, then the program is ill formed. If the statement is
1626 not executed then all is well. At the very least, we cannot ICE. */
1627 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1629 gcc_assert (!is_real_op);
1630 flags &= ~(opf_is_def | opf_kill_def);
1633 if (is_real_op)
1635 /* The variable is a GIMPLE register. Add it to real operands. */
1636 if (flags & opf_is_def)
1637 append_def (var_p);
1638 else
1639 append_use (var_p);
1641 else
1643 varray_type aliases;
1645 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1646 virtual operands, unless the caller has specifically requested
1647 not to add virtual operands (used when adding operands inside an
1648 ADDR_EXPR expression). */
1649 if (flags & opf_no_vops)
1650 return;
1652 aliases = v_ann->may_aliases;
1654 if (aliases == NULL)
1656 /* The variable is not aliased or it is an alias tag. */
1657 if (flags & opf_is_def)
1659 if (s_ann && !aliases_computed_p && TREE_ADDRESSABLE (var))
1660 s_ann->has_volatile_ops = true;
1661 if (flags & opf_kill_def)
1663 /* Only regular variables or struct fields may get a
1664 V_MUST_DEF operand. */
1665 gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG
1666 || v_ann->mem_tag_kind == STRUCT_FIELD);
1667 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1668 variable definitions. */
1669 append_v_must_def (var);
1671 else
1673 /* Add a V_MAY_DEF for call-clobbered variables and
1674 memory tags. */
1675 append_v_may_def (var);
1678 else
1680 append_vuse (var);
1681 if (s_ann && v_ann->is_alias_tag)
1682 s_ann->makes_aliased_loads = 1;
1685 else
1687 size_t i;
1689 /* The variable is aliased. Add its aliases to the virtual
1690 operands. */
1691 gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);
1693 if (flags & opf_is_def)
1695 /* If the variable is also an alias tag, add a virtual
1696 operand for it, otherwise we will miss representing
1697 references to the members of the variable's alias set.
1698 This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
1699 if (v_ann->is_alias_tag)
1700 append_v_may_def (var);
1702 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1703 append_v_may_def (VARRAY_TREE (aliases, i));
1705 if (s_ann)
1706 s_ann->makes_aliased_stores = 1;
1708 else
1710 /* Similarly, append a virtual use for VAR itself, when
1711 it is an alias tag. */
1712 if (v_ann->is_alias_tag)
1713 append_vuse (var);
1715 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1716 append_vuse (VARRAY_TREE (aliases, i));
1718 if (s_ann)
1719 s_ann->makes_aliased_loads = 1;
1726 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
1727 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
1728 a single variable whose address has been taken or any other valid
1729 GIMPLE memory reference (structure reference, array, etc). If the
1730 base address of REF is a decl that has sub-variables, also add all
1731 of its sub-variables. */
1733 void
1734 add_to_addressable_set (tree ref, bitmap *addresses_taken)
1736 tree var;
1737 subvar_t svars;
1739 gcc_assert (addresses_taken);
1741 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
1742 as the only thing we take the address of. If VAR is a structure,
1743 taking the address of a field means that the whole structure may
1744 be referenced using pointer arithmetic. See PR 21407 and the
1745 ensuing mailing list discussion. */
1746 var = get_base_address (ref);
1747 if (var && SSA_VAR_P (var))
1749 if (*addresses_taken == NULL)
1750 *addresses_taken = BITMAP_GGC_ALLOC ();
1752 if (var_can_have_subvars (var)
1753 && (svars = get_subvars_for_var (var)))
1755 subvar_t sv;
1756 for (sv = svars; sv; sv = sv->next)
1758 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
1759 TREE_ADDRESSABLE (sv->var) = 1;
1762 else
1764 bitmap_set_bit (*addresses_taken, DECL_UID (var));
1765 TREE_ADDRESSABLE (var) = 1;
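/* Illustrative example (hypothetical names): for 'p = &s.a', where 's' is a
   structure whose fields have sub-variables SFT.1 (for 's.a') and SFT.2
   (for 's.b'), the routine above takes the base address 's', marks both
   SFT.1 and SFT.2 addressable and adds their DECL_UIDs to the set, because
   pointer arithmetic starting from '&s.a' may reach any part of 's' (see
   PR 21407).  */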
1771 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1772 clobbered variables in the function. */
1774 static void
1775 add_call_clobber_ops (tree stmt, tree callee)
1777 unsigned u;
1778 tree t;
1779 bitmap_iterator bi;
1780 stmt_ann_t s_ann = stmt_ann (stmt);
1781 struct stmt_ann_d empty_ann;
1782 bitmap not_read_b, not_written_b;
1784 /* Functions that are not const, pure, or noreturn may clobber
1785 call-clobbered variables. */
1786 if (s_ann)
1787 s_ann->makes_clobbering_call = true;
1789 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1790 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1791 if (global_var)
1793 add_stmt_operand (&global_var, s_ann, opf_is_def);
1794 return;
1797 /* FIXME - if we have better information from the static vars
1798 analysis, we need to make the cache call site specific. This way
1799 we can have the performance benefits even if we are doing good
1800 optimization. */
1802 /* Get info for local and module level statics. There is a bit
1803 set for each static if the call being processed does not read
1804 or write that variable. */
1806 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1807 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1809 /* If cache is valid, copy the elements into the build vectors. */
1810 if (ssa_call_clobbered_cache_valid_for == current_function_decl
1811 && (!not_read_b || bitmap_empty_p (not_read_b))
1812 && (!not_written_b || bitmap_empty_p (not_written_b)))
1814 for (u = 0 ; u < VEC_length (tree, clobbered_vuses); u++)
1816 t = VEC_index (tree, clobbered_vuses, u);
1817 gcc_assert (TREE_CODE (t) != SSA_NAME);
1818 var_ann (t)->in_vuse_list = 1;
1819 VEC_safe_push (tree, heap, build_vuses, (tree)t);
1821 for (u = 0; u < VEC_length (tree, clobbered_v_may_defs); u++)
1823 t = VEC_index (tree, clobbered_v_may_defs, u);
1824 gcc_assert (TREE_CODE (t) != SSA_NAME);
1825 var_ann (t)->in_v_may_def_list = 1;
1826 VEC_safe_push (tree, heap, build_v_may_defs, (tree)t);
1828 if (s_ann)
1830 s_ann->makes_aliased_loads = clobbered_aliased_loads;
1831 s_ann->makes_aliased_stores = clobbered_aliased_stores;
1833 return;
1836 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
1838 /* Add a V_MAY_DEF operand for every call clobbered variable. */
1839 EXECUTE_IF_SET_IN_BITMAP (cfun->ssa->call_clobbered_vars, 0, u, bi)
1841 tree var = referenced_var (u);
1842 if (unmodifiable_var_p (var))
1843 add_stmt_operand (&var, &empty_ann, opf_none);
1844 else
1846 bool not_read
1847 = not_read_b ? bitmap_bit_p (not_read_b, u) : false;
1848 bool not_written
1849 = not_written_b ? bitmap_bit_p (not_written_b, u) : false;
1851 if ((TREE_READONLY (var)
1852 && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
1853 || not_written)
1855 if (!not_read)
1856 add_stmt_operand (&var, &empty_ann, opf_none);
1858 else
1859 add_stmt_operand (&var, &empty_ann, opf_is_def);
1863 if ((!not_read_b || bitmap_empty_p (not_read_b))
1864 && (!not_written_b || bitmap_empty_p (not_written_b)))
1866 clobbered_aliased_loads = empty_ann.makes_aliased_loads;
1867 clobbered_aliased_stores = empty_ann.makes_aliased_stores;
1869 /* Set the flags for a stmt's annotation. */
1870 if (s_ann)
1872 s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
1873 s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
1876 /* Prepare empty cache vectors. */
1877 VEC_truncate (tree, clobbered_vuses, 0);
1878 VEC_truncate (tree, clobbered_v_may_defs, 0);
1880 /* Now fill the clobbered cache with the values that have been found. */
1881 for (u = 0; u < VEC_length (tree, build_vuses); u++)
1882 VEC_safe_push (tree, heap, clobbered_vuses,
1883 VEC_index (tree, build_vuses, u));
1885 gcc_assert (VEC_length (tree, build_vuses)
1886 == VEC_length (tree, clobbered_vuses));
1888 for (u = 0; u < VEC_length (tree, build_v_may_defs); u++)
1889 VEC_safe_push (tree, heap, clobbered_v_may_defs,
1890 VEC_index (tree, build_v_may_defs, u));
1892 gcc_assert (VEC_length (tree, build_v_may_defs)
1893 == VEC_length (tree, clobbered_v_may_defs));
1895 ssa_call_clobbered_cache_valid_for = current_function_decl;
1900 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1901 function. */
1903 static void
1904 add_call_read_ops (tree stmt)
1906 unsigned u;
1907 tree t;
1908 bitmap_iterator bi;
1909 stmt_ann_t s_ann = stmt_ann (stmt);
1910 struct stmt_ann_d empty_ann;
1912 /* If the function is not pure, it may reference memory. Add
1913 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1914 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1915 if (global_var)
1917 add_stmt_operand (&global_var, s_ann, opf_none);
1918 return;
1921 /* If cache is valid, copy the elements into the build vector. */
1922 if (ssa_ro_call_cache_valid_for == current_function_decl)
1924 for (u = 0; u < VEC_length (tree, ro_call_vuses); u++)
1926 t = VEC_index (tree, ro_call_vuses, u);
1927 gcc_assert (TREE_CODE (t) != SSA_NAME);
1928 var_ann (t)->in_vuse_list = 1;
1929 VEC_safe_push (tree, heap, build_vuses, (tree)t);
1931 if (s_ann)
1932 s_ann->makes_aliased_loads = ro_call_aliased_loads;
1933 return;
1936 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
1938 /* Add a VUSE for each call-clobbered variable. */
1939 EXECUTE_IF_SET_IN_BITMAP (cfun->ssa->call_clobbered_vars, 0, u, bi)
1941 tree var = referenced_var (u);
1942 add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
1945 ro_call_aliased_loads = empty_ann.makes_aliased_loads;
1946 if (s_ann)
1947 s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
1949 /* Prepare empty cache vectors. */
1950 VEC_truncate (tree, ro_call_vuses, 0);
1952 /* Now fill the read-only call cache with the values that have been found. */
1953 for (u = 0; u < VEC_length (tree, build_vuses); u++)
1954 VEC_safe_push (tree, heap, ro_call_vuses,
1955 VEC_index (tree, build_vuses, u));
1957 gcc_assert (VEC_length (tree, build_vuses)
1958 == VEC_length (tree, ro_call_vuses));
1960 ssa_ro_call_cache_valid_for = current_function_decl;
1964 /* Scan the immediate_use list for VAR, making sure it is linked properly.
1965 Return TRUE if there is a problem. */
1967 bool
1968 verify_imm_links (FILE *f, tree var)
1970 use_operand_p ptr, prev, list;
1971 int count;
1973 gcc_assert (TREE_CODE (var) == SSA_NAME);
1975 list = &(SSA_NAME_IMM_USE_NODE (var));
1976 gcc_assert (list->use == NULL);
1978 if (list->prev == NULL)
1980 gcc_assert (list->next == NULL);
1981 return false;
1984 prev = list;
1985 count = 0;
1986 for (ptr = list->next; ptr != list; )
1988 if (prev != ptr->prev)
1989 goto error;
1991 if (ptr->use == NULL)
1992 goto error; /* 2 roots, or SAFE guard node. */
1993 else if (*(ptr->use) != var)
1994 goto error;
1996 prev = ptr;
1997 ptr = ptr->next;
1998 /* Avoid infinite loops. */
1999 if (count++ > 30000)
2000 goto error;
2003 /* Verify list in the other direction. */
2004 prev = list;
2005 for (ptr = list->prev; ptr != list; )
2007 if (prev != ptr->next)
2008 goto error;
2009 prev = ptr;
2010 ptr = ptr->prev;
2011 if (count-- < 0)
2012 goto error;
2015 if (count != 0)
2016 goto error;
2018 return false;
2020 error:
2021 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2023 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2024 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2026 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2027 (void *)ptr->use);
2028 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2029 fprintf(f, "\n");
2030 return true;
2034 /* Dump all the immediate uses of VAR to FILE. */
2036 void
2037 dump_immediate_uses_for (FILE *file, tree var)
2039 imm_use_iterator iter;
2040 use_operand_p use_p;
2042 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2044 print_generic_expr (file, var, TDF_SLIM);
2045 fprintf (file, " : -->");
2046 if (has_zero_uses (var))
2047 fprintf (file, " no uses.\n");
2048 else
2049 if (has_single_use (var))
2050 fprintf (file, " single use.\n");
2051 else
2052 fprintf (file, "%d uses.\n", num_imm_uses (var));
2054 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2056 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2057 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2058 else
2059 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2061 fprintf(file, "\n");
2064 /* Dump all the immediate uses to FILE. */
2066 void
2067 dump_immediate_uses (FILE *file)
2069 tree var;
2070 unsigned int x;
2072 fprintf (file, "Immediate_uses: \n\n");
2073 for (x = 1; x < num_ssa_names; x++)
2075 var = ssa_name(x);
2076 if (!var)
2077 continue;
2078 dump_immediate_uses_for (file, var);
2083 /* Dump def-use edges on stderr. */
2085 void
2086 debug_immediate_uses (void)
2088 dump_immediate_uses (stderr);
2091 /* Dump def-use edges for VAR on stderr. */
2093 void
2094 debug_immediate_uses_for (tree var)
2096 dump_immediate_uses_for (stderr, var);