1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
53 The operand tree is then parsed by the various get_* routines, which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had SSA_NAME version of a
72 variable, and that same variable occurs in the same operands cache, then
73 the new cache vector will also get the same SSA_NAME.
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'. */
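/* A usage sketch (assuming the operand iterator macros and SSA_OP_* flags
   declared elsewhere in the tree-SSA headers): a pass that wants up-to-date
   operands for a statement typically does

       tree var;
       ssa_op_iter iter;

       update_stmt (stmt);
       FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
         process_use (var);

   where process_use stands in for whatever the pass does with each operand;
   passes do not manipulate the build_* vectors or finalize routines in this
   file directly.  */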
82 /* Flags to describe operand properties in helpers. */
84 /* By default, operands are loaded. */
85 #define opf_none 0
87 /* Operand is the target of an assignment expression or a
88 call-clobbered variable. */
89 #define opf_is_def (1 << 0)
91 /* Operand is the target of an assignment expression. */
92 #define opf_kill_def (1 << 1)
94 /* No virtual operands should be created in the expression. This is used
95 when traversing ADDR_EXPR nodes which have different semantics than
96 other expressions. Inside an ADDR_EXPR node, the only operands that we
97 need to consider are indices into arrays. For instance, &a.b[i] should
98 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
99 VUSE for 'b'. */
100 #define opf_no_vops (1 << 2)
102 /* Operand is a "non-specific" kill for call-clobbers and such. This is used
103 to distinguish "reset the world" events from explicit MODIFY_EXPRs. */
104 #define opf_non_specific (1 << 3)
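/* As an example of how these flags combine: parse_ssa_operands below scans
   the LHS of a MODIFY_EXPR with (opf_is_def | opf_kill_def) when the entire
   object is overwritten, but drops opf_kill_def for partial stores such as
   ARRAY_REF or BIT_FIELD_REF destinations, so that add_stmt_operand creates
   a V_MAY_DEF rather than a V_MUST_DEF for the virtual operand.  */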
107 /* Array for building all the def operands. */
108 static VEC(tree,heap) *build_defs;
110 /* Array for building all the use operands. */
111 static VEC(tree,heap) *build_uses;
113 /* Array for building all the v_may_def operands. */
114 static VEC(tree,heap) *build_v_may_defs;
116 /* Array for building all the vuse operands. */
117 static VEC(tree,heap) *build_vuses;
119 /* Array for building all the v_must_def operands. */
120 static VEC(tree,heap) *build_v_must_defs;
122 /* True if the operands for call clobbered vars are cached and valid. */
123 bool ssa_call_clobbered_cache_valid;
124 bool ssa_ro_call_cache_valid;
126 /* These arrays are the cached operand vectors for call clobbered calls. */
127 static VEC(tree,heap) *clobbered_v_may_defs;
128 static VEC(tree,heap) *clobbered_vuses;
129 static VEC(tree,heap) *ro_call_vuses;
130 static bool clobbered_aliased_loads;
131 static bool clobbered_aliased_stores;
132 static bool ro_call_aliased_loads;
133 static bool ops_active = false;
135 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
136 static unsigned operand_memory_index;
138 static void get_expr_operands (tree, tree *, int);
139 static void get_asm_expr_operands (tree);
140 static void get_indirect_ref_operands (tree, tree, int);
141 static void get_tmr_operands (tree, tree, int);
142 static void get_call_expr_operands (tree, tree);
143 static inline void append_def (tree *);
144 static inline void append_use (tree *);
145 static void append_v_may_def (tree);
146 static void append_v_must_def (tree);
147 static void add_call_clobber_ops (tree, tree);
148 static void add_call_read_ops (tree);
149 static void add_stmt_operand (tree *, stmt_ann_t, int);
150 static void build_ssa_operands (tree stmt);
152 static def_optype_p free_defs = NULL;
153 static use_optype_p free_uses = NULL;
154 static vuse_optype_p free_vuses = NULL;
155 static maydef_optype_p free_maydefs = NULL;
156 static mustdef_optype_p free_mustdefs = NULL;
159 /* Return the DECL_UID of the base variable of T. */
161 static inline unsigned
162 get_name_decl (tree t)
164 if (TREE_CODE (t) != SSA_NAME)
165 return DECL_UID (t);
166 else
167 return DECL_UID (SSA_NAME_VAR (t));
170 /* Comparison function for qsort used in operand_build_sort_virtual. */
172 static int
173 operand_build_cmp (const void *p, const void *q)
175 tree e1 = *((const tree *)p);
176 tree e2 = *((const tree *)q);
177 unsigned int u1,u2;
179 u1 = get_name_decl (e1);
180 u2 = get_name_decl (e2);
182 /* We want to sort in ascending order. They can never be equal. */
183 #ifdef ENABLE_CHECKING
184 gcc_assert (u1 != u2);
185 #endif
186 return (u1 > u2 ? 1 : -1);
189 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
191 static inline void
192 operand_build_sort_virtual (VEC(tree,heap) *list)
194 int num = VEC_length (tree, list);
195 if (num < 2)
196 return;
197 if (num == 2)
199 if (get_name_decl (VEC_index (tree, list, 0))
200 > get_name_decl (VEC_index (tree, list, 1)))
202 /* Swap elements if in the wrong order. */
203 tree tmp = VEC_index (tree, list, 0);
204 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
205 VEC_replace (tree, list, 1, tmp);
207 return;
209 /* There are 3 or more elements, call qsort. */
210 qsort (VEC_address (tree, list),
211 VEC_length (tree, list),
212 sizeof (tree),
213 operand_build_cmp);
218 /* Return true if the ssa operands cache is active. */
220 bool
221 ssa_operands_active (void)
223 return ops_active;
227 /* Initialize the operand cache routines. */
229 void
230 init_ssa_operands (void)
232 build_defs = VEC_alloc (tree, heap, 5);
233 build_uses = VEC_alloc (tree, heap, 10);
234 build_vuses = VEC_alloc (tree, heap, 25);
235 build_v_may_defs = VEC_alloc (tree, heap, 25);
236 build_v_must_defs = VEC_alloc (tree, heap, 25);
238 gcc_assert (operand_memory == NULL);
239 operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
240 ops_active = true;
244 /* Dispose of anything required by the operand routines. */
246 void
247 fini_ssa_operands (void)
249 struct ssa_operand_memory_d *ptr;
250 VEC_free (tree, heap, build_defs);
251 VEC_free (tree, heap, build_uses);
252 VEC_free (tree, heap, build_v_must_defs);
253 VEC_free (tree, heap, build_v_may_defs);
254 VEC_free (tree, heap, build_vuses);
255 free_defs = NULL;
256 free_uses = NULL;
257 free_vuses = NULL;
258 free_maydefs = NULL;
259 free_mustdefs = NULL;
260 while ((ptr = operand_memory) != NULL)
262 operand_memory = operand_memory->next;
263 ggc_free (ptr);
266 VEC_free (tree, heap, clobbered_v_may_defs);
267 VEC_free (tree, heap, clobbered_vuses);
268 VEC_free (tree, heap, ro_call_vuses);
269 ops_active = false;
273 /* Return a pointer to SIZE bytes of memory for operands, allocated from the operand memory chunks. */
275 static inline void *
276 ssa_operand_alloc (unsigned size)
278 char *ptr;
279 if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
281 struct ssa_operand_memory_d *ptr;
282 ptr = ggc_alloc (sizeof (struct ssa_operand_memory_d));
283 ptr->next = operand_memory;
284 operand_memory = ptr;
285 operand_memory_index = 0;
287 ptr = &(operand_memory->mem[operand_memory_index]);
288 operand_memory_index += size;
289 return ptr;
293 /* Make sure PTR is in the correct immediate use list. Since uses are simply
294 pointers into the stmt TREE, there is no way of telling if anyone has
295 changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
296 The contents are different, but the pointer is still the same. This
297 routine will check to make sure PTR is in the correct list, and if it isn't
298 put it in the correct list. We cannot simply check the previous node
299 because all nodes in the same stmt might have been changed. */
301 static inline void
302 correct_use_link (use_operand_p ptr, tree stmt)
304 use_operand_p prev;
305 tree root;
307 /* Fold_stmt () may have changed the stmt pointers. */
308 if (ptr->stmt != stmt)
309 ptr->stmt = stmt;
311 prev = ptr->prev;
312 if (prev)
314 bool stmt_mod = true;
315 /* Find the first element which isn't a SAFE iterator, is in a different
316 stmt, and is not a modified stmt. That node is in the correct list,
317 see if we are too. */
319 while (stmt_mod)
321 while (prev->stmt == stmt || prev->stmt == NULL)
322 prev = prev->prev;
323 if (prev->use == NULL)
324 stmt_mod = false;
325 else
326 if ((stmt_mod = stmt_modified_p (prev->stmt)))
327 prev = prev->prev;
330 /* Get the ssa_name of the list the node is in. */
331 if (prev->use == NULL)
332 root = prev->stmt;
333 else
334 root = *(prev->use);
335 /* If it's the right list, simply return. */
336 if (root == *(ptr->use))
337 return;
339 /* It's in the wrong list if we reach here. */
340 delink_imm_use (ptr);
341 link_imm_use (ptr, *(ptr->use));
345 /* This routine makes sure that PTR is in an immediate use list, and makes
346 sure the stmt pointer is set to the current stmt. Virtual uses do not need
347 the overhead of correct_use_link since they cannot be directly manipulated
348 like a real use can be. (They don't exist in the TREE_OPERAND nodes.) */
349 static inline void
350 set_virtual_use_link (use_operand_p ptr, tree stmt)
352 /* Fold_stmt () may have changed the stmt pointers. */
353 if (ptr->stmt != stmt)
354 ptr->stmt = stmt;
356 /* If this use isn't in a list, add it to the correct list. */
357 if (!ptr->prev)
358 link_imm_use (ptr, *(ptr->use));
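/* The blocks of FINALIZE_* macros below parameterize tree-ssa-opfinalize.h,
   which is included once per operand type. Each inclusion expands into an
   allocator and a finalize_ssa_*_ops routine that compares the newly built
   operand vector against the statement's existing one and reuses the old
   entries when they still match.  */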
363 #define FINALIZE_OPBUILD build_defs
364 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
365 build_defs, (I))
366 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
367 build_defs, (I))
368 #define FINALIZE_FUNC finalize_ssa_def_ops
369 #define FINALIZE_ALLOC alloc_def
370 #define FINALIZE_FREE free_defs
371 #define FINALIZE_TYPE struct def_optype_d
372 #define FINALIZE_ELEM(PTR) ((PTR)->def_ptr)
373 #define FINALIZE_OPS DEF_OPS
374 #define FINALIZE_BASE(VAR) VAR
375 #define FINALIZE_BASE_TYPE tree *
376 #define FINALIZE_BASE_ZERO NULL
377 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL)
378 #include "tree-ssa-opfinalize.h"
381 /* This routine will create stmt operands for STMT from the def build list. */
383 static void
384 finalize_ssa_defs (tree stmt)
386 unsigned int num = VEC_length (tree, build_defs);
387 /* There should only be a single real definition per assignment. */
388 gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
390 /* If there is an old list, often the new list is identical, or close, so
391 find the elements at the beginning that are the same as the vector. */
393 finalize_ssa_def_ops (stmt);
394 VEC_truncate (tree, build_defs, 0);
397 #define FINALIZE_OPBUILD build_uses
398 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
399 build_uses, (I))
400 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
401 build_uses, (I))
402 #define FINALIZE_FUNC finalize_ssa_use_ops
403 #define FINALIZE_ALLOC alloc_use
404 #define FINALIZE_FREE free_uses
405 #define FINALIZE_TYPE struct use_optype_d
406 #define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use)
407 #define FINALIZE_OPS USE_OPS
408 #define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR)
409 #define FINALIZE_CORRECT_USE correct_use_link
410 #define FINALIZE_BASE(VAR) VAR
411 #define FINALIZE_BASE_TYPE tree *
412 #define FINALIZE_BASE_ZERO NULL
413 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
414 (PTR)->use_ptr.use = (VAL); \
415 link_imm_use_stmt (&((PTR)->use_ptr), \
416 *(VAL), (STMT))
417 #include "tree-ssa-opfinalize.h"
419 /* Finalize the new use operands for STMT from the use build vector. */
421 static void
422 finalize_ssa_uses (tree stmt)
424 #ifdef ENABLE_CHECKING
426 unsigned x;
427 unsigned num = VEC_length (tree, build_uses);
429 /* If the pointer to the operand is the statement itself, something is
430 wrong. It means that we are pointing to a local variable (the
431 initial call to get_stmt_operands does not pass a pointer to a
432 statement). */
433 for (x = 0; x < num; x++)
434 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
436 #endif
437 finalize_ssa_use_ops (stmt);
438 VEC_truncate (tree, build_uses, 0);
442 /* Finalize the new V_MAY_DEF operands for STMT. */
443 #define FINALIZE_OPBUILD build_v_may_defs
444 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_may_defs, (I))
445 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
446 build_v_may_defs, (I)))
447 #define FINALIZE_FUNC finalize_ssa_v_may_def_ops
448 #define FINALIZE_ALLOC alloc_maydef
449 #define FINALIZE_FREE free_maydefs
450 #define FINALIZE_TYPE struct maydef_optype_d
451 #define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR)
452 #define FINALIZE_OPS MAYDEF_OPS
453 #define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR)
454 #define FINALIZE_CORRECT_USE set_virtual_use_link
455 #define FINALIZE_BASE_ZERO 0
456 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
457 #define FINALIZE_BASE_TYPE unsigned
458 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
459 (PTR)->def_var = (VAL); \
460 (PTR)->use_var = (VAL); \
461 (PTR)->use_ptr.use = &((PTR)->use_var); \
462 link_imm_use_stmt (&((PTR)->use_ptr), \
463 (VAL), (STMT))
464 #include "tree-ssa-opfinalize.h"
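/* Finalize the V_MAY_DEF operands for STMT. The build vector is not
   truncated here; finalize_ssa_vuses still needs it and clears it later
   via cleanup_v_may_defs.  */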
467 static void
468 finalize_ssa_v_may_defs (tree stmt)
470 finalize_ssa_v_may_def_ops (stmt);
474 /* Clear the in_list bits and empty the build array for v_may_defs. */
476 static inline void
477 cleanup_v_may_defs (void)
479 unsigned x, num;
480 num = VEC_length (tree, build_v_may_defs);
482 for (x = 0; x < num; x++)
484 tree t = VEC_index (tree, build_v_may_defs, x);
485 if (TREE_CODE (t) != SSA_NAME)
487 var_ann_t ann = var_ann (t);
488 ann->in_v_may_def_list = 0;
491 VEC_truncate (tree, build_v_may_defs, 0);
495 #define FINALIZE_OPBUILD build_vuses
496 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_vuses, (I))
497 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
498 build_vuses, (I)))
499 #define FINALIZE_FUNC finalize_ssa_vuse_ops
500 #define FINALIZE_ALLOC alloc_vuse
501 #define FINALIZE_FREE free_vuses
502 #define FINALIZE_TYPE struct vuse_optype_d
503 #define FINALIZE_ELEM(PTR) VUSE_OP (PTR)
504 #define FINALIZE_OPS VUSE_OPS
505 #define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR)
506 #define FINALIZE_CORRECT_USE set_virtual_use_link
507 #define FINALIZE_BASE_ZERO 0
508 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
509 #define FINALIZE_BASE_TYPE unsigned
510 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
511 (PTR)->use_var = (VAL); \
512 (PTR)->use_ptr.use = &((PTR)->use_var); \
513 link_imm_use_stmt (&((PTR)->use_ptr), \
514 (VAL), (STMT))
515 #include "tree-ssa-opfinalize.h"
518 /* Finalize the new VUSE operands for STMT. */
520 static void
521 finalize_ssa_vuses (tree stmt)
523 unsigned num, num_v_may_defs;
524 unsigned vuse_index;
526 /* Remove superfluous VUSE operands. If the statement already has a
527 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
528 needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
529 suppose that variable 'a' is aliased:
531 # VUSE <a_2>
532 # a_3 = V_MAY_DEF <a_2>
533 a = a + 1;
535 The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
536 operation. */
538 num = VEC_length (tree, build_vuses);
539 num_v_may_defs = VEC_length (tree, build_v_may_defs);
541 if (num > 0 && num_v_may_defs > 0)
543 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
545 tree vuse;
546 vuse = VEC_index (tree, build_vuses, vuse_index);
547 if (TREE_CODE (vuse) != SSA_NAME)
549 var_ann_t ann = var_ann (vuse);
550 ann->in_vuse_list = 0;
551 if (ann->in_v_may_def_list)
553 VEC_ordered_remove (tree, build_vuses, vuse_index);
554 continue;
557 vuse_index++;
560 else
561 /* Clear out the in_list bits. */
562 for (vuse_index = 0;
563 vuse_index < VEC_length (tree, build_vuses);
564 vuse_index++)
566 tree t = VEC_index (tree, build_vuses, vuse_index);
567 if (TREE_CODE (t) != SSA_NAME)
569 var_ann_t ann = var_ann (t);
570 ann->in_vuse_list = 0;
574 finalize_ssa_vuse_ops (stmt);
575 /* The v_may_def build vector wasn't cleaned up because we needed it. */
576 cleanup_v_may_defs ();
578 /* Free the vuses build vector. */
579 VEC_truncate (tree, build_vuses, 0);
583 /* Finalize the new V_MUST_DEF operands for STMT. */
585 #define FINALIZE_OPBUILD build_v_must_defs
586 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_must_defs, (I))
587 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
588 build_v_must_defs, (I)))
589 #define FINALIZE_FUNC finalize_ssa_v_must_def_ops
590 #define FINALIZE_ALLOC alloc_mustdef
591 #define FINALIZE_FREE free_mustdefs
592 #define FINALIZE_TYPE struct mustdef_optype_d
593 #define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR)
594 #define FINALIZE_OPS MUSTDEF_OPS
595 #define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR)
596 #define FINALIZE_CORRECT_USE set_virtual_use_link
597 #define FINALIZE_BASE_ZERO 0
598 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
599 #define FINALIZE_BASE_TYPE unsigned
600 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
601 (PTR)->def_var = (VAL); \
602 (PTR)->kill_var = (VAL); \
603 (PTR)->use_ptr.use = &((PTR)->kill_var);\
604 link_imm_use_stmt (&((PTR)->use_ptr), \
605 (VAL), (STMT))
606 #include "tree-ssa-opfinalize.h"
609 static void
610 finalize_ssa_v_must_defs (tree stmt)
612 /* In the presence of subvars, there may be more than one V_MUST_DEF per
613 statement (one for each subvar). It is a bit expensive to verify that
614 all must-defs in a statement belong to subvars if there is more than one
615 MUST-def, so we don't do it. Suffice to say, if you reach here without
616 having subvars, and have num >1, you have hit a bug. */
618 finalize_ssa_v_must_def_ops (stmt);
619 VEC_truncate (tree, build_v_must_defs, 0);
623 /* Finalize all the build vectors and store them in the operand cache of STMT. */
625 static inline void
626 finalize_ssa_stmt_operands (tree stmt)
628 finalize_ssa_defs (stmt);
629 finalize_ssa_uses (stmt);
630 finalize_ssa_v_must_defs (stmt);
631 finalize_ssa_v_may_defs (stmt);
632 finalize_ssa_vuses (stmt);
636 /* Start the process of building up the statement's operand vectors. */
638 static inline void
639 start_ssa_stmt_operands (void)
641 gcc_assert (VEC_length (tree, build_defs) == 0);
642 gcc_assert (VEC_length (tree, build_uses) == 0);
643 gcc_assert (VEC_length (tree, build_vuses) == 0);
644 gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
645 gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
649 /* Add DEF_P to the list of pointers to operands. */
651 static inline void
652 append_def (tree *def_p)
654 VEC_safe_push (tree, heap, build_defs, (tree)def_p);
658 /* Add USE_P to the list of pointers to operands. */
660 static inline void
661 append_use (tree *use_p)
663 VEC_safe_push (tree, heap, build_uses, (tree)use_p);
667 /* Add a new virtual may def for variable VAR to the build array. */
669 static inline void
670 append_v_may_def (tree var)
672 if (TREE_CODE (var) != SSA_NAME)
674 var_ann_t ann = get_var_ann (var);
676 /* Don't allow duplicate entries. */
677 if (ann->in_v_may_def_list)
678 return;
679 ann->in_v_may_def_list = 1;
682 VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
686 /* Add VAR to the list of virtual uses. */
688 static inline void
689 append_vuse (tree var)
692 /* Don't allow duplicate entries. */
693 if (TREE_CODE (var) != SSA_NAME)
695 var_ann_t ann = get_var_ann (var);
697 if (ann->in_vuse_list || ann->in_v_may_def_list)
698 return;
699 ann->in_vuse_list = 1;
702 VEC_safe_push (tree, heap, build_vuses, (tree)var);
706 /* Add VAR to the list of virtual must definitions. */
708 static inline void
709 append_v_must_def (tree var)
711 unsigned i;
713 /* Don't allow duplicate entries. */
714 for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
715 if (var == VEC_index (tree, build_v_must_defs, i))
716 return;
718 VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
722 /* Parse STMT looking for operands. When finished, the various build_*
723 operand vectors will have potential operands in them. */
726 static void
727 parse_ssa_operands (tree stmt)
729 enum tree_code code;
731 code = TREE_CODE (stmt);
732 switch (code)
734 case MODIFY_EXPR:
735 /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
736 either only part of LHS is modified or if the RHS might throw,
737 otherwise, use V_MUST_DEF.
739 ??? If it might throw, we should represent somehow that it is killed
740 on the fallthrough path. */
742 tree lhs = TREE_OPERAND (stmt, 0);
743 int lhs_flags = opf_is_def;
745 get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
747 /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
748 or not the entire LHS is modified; that depends on what's
749 inside the VIEW_CONVERT_EXPR. */
750 if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
751 lhs = TREE_OPERAND (lhs, 0);
753 if (TREE_CODE (lhs) != ARRAY_REF
754 && TREE_CODE (lhs) != ARRAY_RANGE_REF
755 && TREE_CODE (lhs) != BIT_FIELD_REF
756 && TREE_CODE (lhs) != REALPART_EXPR
757 && TREE_CODE (lhs) != IMAGPART_EXPR)
758 lhs_flags |= opf_kill_def;
760 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
762 break;
764 case COND_EXPR:
765 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
766 break;
768 case SWITCH_EXPR:
769 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
770 break;
772 case ASM_EXPR:
773 get_asm_expr_operands (stmt);
774 break;
776 case RETURN_EXPR:
777 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
778 break;
780 case GOTO_EXPR:
781 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
782 break;
784 case LABEL_EXPR:
785 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
786 break;
788 /* These nodes contain no variable references. */
789 case BIND_EXPR:
790 case CASE_LABEL_EXPR:
791 case TRY_CATCH_EXPR:
792 case TRY_FINALLY_EXPR:
793 case EH_FILTER_EXPR:
794 case CATCH_EXPR:
795 case RESX_EXPR:
796 break;
798 default:
799 /* Notice that if get_expr_operands tries to use &STMT as the operand
800 pointer (which may only happen for USE operands), we will fail in
801 append_use. This default will handle statements like empty
802 statements, or CALL_EXPRs that may appear on the RHS of a statement
803 or as statements themselves. */
804 get_expr_operands (stmt, &stmt, opf_none);
805 break;
809 /* Create an operands cache for STMT. The statement's annotation flags for
810 volatile operands and aliased loads/stores are reset, the build_* vectors
811 are filled by parsing STMT, the virtual operand vectors are sorted, and the
812 result is committed by the finalize routines, which reuse the statement's
813 previous operand vectors whenever the new operands match the old ones. */
831 static void
832 build_ssa_operands (tree stmt)
834 stmt_ann_t ann = get_stmt_ann (stmt);
836 /* Initially assume that the statement has no volatile operands, nor
837 makes aliased loads or stores. */
838 if (ann)
840 ann->has_volatile_ops = false;
841 ann->makes_aliased_stores = false;
842 ann->makes_aliased_loads = false;
845 start_ssa_stmt_operands ();
847 parse_ssa_operands (stmt);
848 operand_build_sort_virtual (build_vuses);
849 operand_build_sort_virtual (build_v_may_defs);
850 operand_build_sort_virtual (build_v_must_defs);
852 finalize_ssa_stmt_operands (stmt);
856 /* Free any operands vectors in OPS. */
857 void
858 free_ssa_operands (stmt_operands_p ops)
860 ops->def_ops = NULL;
861 ops->use_ops = NULL;
862 ops->maydef_ops = NULL;
863 ops->mustdef_ops = NULL;
864 ops->vuse_ops = NULL;
868 /* Get the operands of statement STMT. Note that repeated calls to
869 get_stmt_operands for the same statement will do nothing until the
870 statement is marked modified by a call to mark_stmt_modified(). */
872 void
873 update_stmt_operands (tree stmt)
875 stmt_ann_t ann = get_stmt_ann (stmt);
876 /* If get_stmt_operands is called before SSA is initialized, don't
877 do anything. */
878 if (!ssa_operands_active ())
879 return;
880 /* The optimizers cannot handle statements that are nothing but a
881 _DECL. This indicates a bug in the gimplifier. */
882 gcc_assert (!SSA_VAR_P (stmt));
884 gcc_assert (ann->modified);
886 timevar_push (TV_TREE_OPS);
888 build_ssa_operands (stmt);
890 /* Clear the modified bit for STMT. Subsequent calls to
891 get_stmt_operands for this statement will do nothing until the
892 statement is marked modified by a call to mark_stmt_modified(). */
893 ann->modified = 0;
895 timevar_pop (TV_TREE_OPS);
899 /* Copies virtual operands from SRC to DST. */
901 void
902 copy_virtual_operands (tree dest, tree src)
904 tree t;
905 ssa_op_iter iter, old_iter;
906 use_operand_p use_p, u2;
907 def_operand_p def_p, d2;
909 build_ssa_operands (dest);
911 /* Copy all the virtual fields. */
912 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
913 append_vuse (t);
914 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
915 append_v_may_def (t);
916 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
917 append_v_must_def (t);
919 if (VEC_length (tree, build_vuses) == 0
920 && VEC_length (tree, build_v_may_defs) == 0
921 && VEC_length (tree, build_v_must_defs) == 0)
922 return;
924 /* Now commit the virtual operands to this stmt. */
925 finalize_ssa_v_must_defs (dest);
926 finalize_ssa_v_may_defs (dest);
927 finalize_ssa_vuses (dest);
929 /* Finally, set the fields to the same values as the originals. */
932 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
933 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
935 gcc_assert (!op_iter_done (&old_iter));
936 SET_USE (use_p, t);
937 t = op_iter_next_tree (&old_iter);
939 gcc_assert (op_iter_done (&old_iter));
941 op_iter_init_maydef (&old_iter, src, &u2, &d2);
942 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
944 gcc_assert (!op_iter_done (&old_iter));
945 SET_USE (use_p, USE_FROM_PTR (u2));
946 SET_DEF (def_p, DEF_FROM_PTR (d2));
947 op_iter_next_maymustdef (&u2, &d2, &old_iter);
949 gcc_assert (op_iter_done (&old_iter));
951 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
952 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
954 gcc_assert (!op_iter_done (&old_iter));
955 SET_USE (use_p, USE_FROM_PTR (u2));
956 SET_DEF (def_p, DEF_FROM_PTR (d2));
957 op_iter_next_maymustdef (&u2, &d2, &old_iter);
959 gcc_assert (op_iter_done (&old_iter));
964 /* Specifically for use in DOM's expression analysis. Given a store, we
965 create an artificial stmt which looks like a load from the store; this can
966 be used to eliminate redundant loads. OLD_STMT is the store stmt, and
967 NEW_STMT is the new load which represents a load of the values stored. */
970 void
971 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
973 stmt_ann_t ann;
974 tree op;
975 ssa_op_iter iter;
976 use_operand_p use_p;
977 unsigned x;
979 ann = get_stmt_ann (new_stmt);
981 /* Process the stmt looking for operands. */
982 start_ssa_stmt_operands ();
983 parse_ssa_operands (new_stmt);
985 for (x = 0; x < VEC_length (tree, build_vuses); x++)
987 tree t = VEC_index (tree, build_vuses, x);
988 if (TREE_CODE (t) != SSA_NAME)
990 var_ann_t ann = var_ann (t);
991 ann->in_vuse_list = 0;
995 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
997 tree t = VEC_index (tree, build_v_may_defs, x);
998 if (TREE_CODE (t) != SSA_NAME)
1000 var_ann_t ann = var_ann (t);
1001 ann->in_v_may_def_list = 0;
1004 /* Remove any virtual operands that were found. */
1005 VEC_truncate (tree, build_v_may_defs, 0);
1006 VEC_truncate (tree, build_v_must_defs, 0);
1007 VEC_truncate (tree, build_vuses, 0);
1009 /* For each VDEF on the original statement, we want to create a
1010 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
1011 statement. */
1012 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
1013 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
1014 append_vuse (op);
1016 /* Now build the operands for this new stmt. */
1017 finalize_ssa_stmt_operands (new_stmt);
1019 /* All uses in this fake stmt must not be in the immediate use lists. */
1020 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
1021 delink_imm_use (use_p);
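/* Swap the operands pointed to by EXP0 and EXP1 in STMT. If the operand
   cache is active, the corresponding use entries in the cache are adjusted
   as well, so that each value keeps its existing position in its
   immediate-use list after the swap.  */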
1024 void
1025 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
1027 tree op0, op1;
1028 op0 = *exp0;
1029 op1 = *exp1;
1031 /* If the operand cache is active, attempt to preserve the relative positions
1032 of these two operands in their respective immediate use lists. */
1033 if (ssa_operands_active () && op0 != op1)
1035 use_optype_p use0, use1, ptr;
1036 use0 = use1 = NULL;
1037 /* Find the 2 operands in the cache, if they are there. */
1038 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1039 if (USE_OP_PTR (ptr)->use == exp0)
1041 use0 = ptr;
1042 break;
1044 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1045 if (USE_OP_PTR (ptr)->use == exp1)
1047 use1 = ptr;
1048 break;
1050 /* If we can't find both operand entries, there isn't much we can do
1051 at this point. Presumably we don't need to worry about it. */
1052 if (use0 && use1)
1054 tree *tmp = USE_OP_PTR (use1)->use;
1055 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
1056 USE_OP_PTR (use0)->use = tmp;
1060 /* Now swap the data. */
1061 *exp0 = op1;
1062 *exp1 = op0;
1066 /* Recursively scan the expression pointed to by EXPR_P in statement STMT.
1067 FLAGS is a bitmask of OPF_* constants modifying how to interpret the
1068 operands found. */
1070 static void
1071 get_expr_operands (tree stmt, tree *expr_p, int flags)
1073 enum tree_code code;
1074 enum tree_code_class class;
1075 tree expr = *expr_p;
1076 stmt_ann_t s_ann = stmt_ann (stmt);
1078 if (expr == NULL)
1079 return;
1081 code = TREE_CODE (expr);
1082 class = TREE_CODE_CLASS (code);
1084 switch (code)
1086 case ADDR_EXPR:
1087 /* We could have the address of a component, array member,
1088 etc which has interesting variable references. */
1089 /* Taking the address of a variable does not represent a
1090 reference to it, but the fact that the stmt takes its address will be
1091 of interest to some passes (e.g. alias resolution). */
1092 add_stmt_operand (expr_p, s_ann, 0);
1094 /* If the address is invariant, there may be no interesting variable
1095 references inside. */
1096 if (is_gimple_min_invariant (expr))
1097 return;
1099 /* There should be no VUSEs created, since the referenced objects are
1100 not really accessed. The only operands that we should find here
1101 are ARRAY_REF indices which will always be real operands (GIMPLE
1102 does not allow non-registers as array indices). */
1103 flags |= opf_no_vops;
1105 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1106 return;
1108 case SSA_NAME:
1109 case VAR_DECL:
1110 case PARM_DECL:
1111 case RESULT_DECL:
1112 case CONST_DECL:
1114 subvar_t svars;
1116 /* Add the subvars for a variable if it has subvars, to DEFS or USES.
1117 Otherwise, add the variable itself.
1118 Whether it goes to USES or DEFS depends on the operand flags. */
1119 if (var_can_have_subvars (expr)
1120 && (svars = get_subvars_for_var (expr)))
1122 subvar_t sv;
1123 for (sv = svars; sv; sv = sv->next)
1124 add_stmt_operand (&sv->var, s_ann, flags);
1126 else
1128 add_stmt_operand (expr_p, s_ann, flags);
1130 return;
1132 case MISALIGNED_INDIRECT_REF:
1133 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1134 /* fall through */
1136 case ALIGN_INDIRECT_REF:
1137 case INDIRECT_REF:
1138 get_indirect_ref_operands (stmt, expr, flags);
1139 return;
1141 case TARGET_MEM_REF:
1142 get_tmr_operands (stmt, expr, flags);
1143 return;
1145 case ARRAY_REF:
1146 case ARRAY_RANGE_REF:
1147 /* Treat array references as references to the virtual variable
1148 representing the array. The virtual variable for an ARRAY_REF
1149 is the VAR_DECL for the array. */
1151 /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
1152 according to the value of IS_DEF. Recurse if the LHS of the
1153 ARRAY_REF node is not a regular variable. */
1154 if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
1155 add_stmt_operand (expr_p, s_ann, flags);
1156 else
1157 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1159 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1160 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1161 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1162 return;
1164 case COMPONENT_REF:
1165 case REALPART_EXPR:
1166 case IMAGPART_EXPR:
1168 tree ref;
1169 unsigned HOST_WIDE_INT offset, size;
1170 /* This component ref becomes an access to all of the subvariables
1171 it can touch, if we can determine that, but *NOT* the real one.
1172 If we can't determine which fields we could touch, the recursion
1173 will eventually get to a variable and add *all* of its subvars, or
1174 whatever is the minimum correct subset. */
1176 ref = okay_component_ref_for_subvars (expr, &offset, &size);
1177 if (ref)
1179 subvar_t svars = get_subvars_for_var (ref);
1180 subvar_t sv;
1181 for (sv = svars; sv; sv = sv->next)
1183 bool exact;
1184 if (overlap_subvar (offset, size, sv, &exact))
1186 int subvar_flags = flags;
1187 if (!exact)
1188 subvar_flags &= ~opf_kill_def;
1189 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1193 else
1194 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1195 flags & ~opf_kill_def);
1197 if (code == COMPONENT_REF)
1199 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1200 s_ann->has_volatile_ops = true;
1201 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1203 return;
1205 case WITH_SIZE_EXPR:
1206 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1207 and an rvalue reference to its second argument. */
1208 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1209 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1210 return;
1212 case CALL_EXPR:
1213 get_call_expr_operands (stmt, expr);
1214 return;
1216 case COND_EXPR:
1217 case VEC_COND_EXPR:
1218 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1219 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1220 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1221 return;
1223 case MODIFY_EXPR:
1225 int subflags;
1226 tree op;
1228 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1230 op = TREE_OPERAND (expr, 0);
1231 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1232 op = TREE_OPERAND (expr, 0);
1233 if (TREE_CODE (op) == ARRAY_REF
1234 || TREE_CODE (op) == ARRAY_RANGE_REF
1235 || TREE_CODE (op) == REALPART_EXPR
1236 || TREE_CODE (op) == IMAGPART_EXPR)
1237 subflags = opf_is_def;
1238 else
1239 subflags = opf_is_def | opf_kill_def;
1241 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
1242 return;
1245 case CONSTRUCTOR:
1247 /* General aggregate CONSTRUCTORs have been decomposed, but they
1248 are still in use as the COMPLEX_EXPR equivalent for vectors. */
1249 constructor_elt *ce;
1250 unsigned HOST_WIDE_INT idx;
1252 for (idx = 0;
1253 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
1254 idx++)
1255 get_expr_operands (stmt, &ce->value, opf_none);
1257 return;
1260 case TRUTH_NOT_EXPR:
1261 case BIT_FIELD_REF:
1262 case VIEW_CONVERT_EXPR:
1263 do_unary:
1264 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1265 return;
1267 case TRUTH_AND_EXPR:
1268 case TRUTH_OR_EXPR:
1269 case TRUTH_XOR_EXPR:
1270 case COMPOUND_EXPR:
1271 case OBJ_TYPE_REF:
1272 case ASSERT_EXPR:
1273 do_binary:
1275 tree op0 = TREE_OPERAND (expr, 0);
1276 tree op1 = TREE_OPERAND (expr, 1);
1278 /* If it would be profitable to swap the operands, then do so to
1279 canonicalize the statement, enabling better optimization.
1281 By placing canonicalization of such expressions here we
1282 transparently keep statements in canonical form, even
1283 when the statement is modified. */
1284 if (tree_swap_operands_p (op0, op1, false))
1286 /* For relationals we need to swap the operands
1287 and change the code. */
1288 if (code == LT_EXPR
1289 || code == GT_EXPR
1290 || code == LE_EXPR
1291 || code == GE_EXPR)
1293 TREE_SET_CODE (expr, swap_tree_comparison (code));
1294 swap_tree_operands (stmt,
1295 &TREE_OPERAND (expr, 0),
1296 &TREE_OPERAND (expr, 1));
1299 /* For a commutative operator we can just swap the operands. */
1300 else if (commutative_tree_code (code))
1302 swap_tree_operands (stmt,
1303 &TREE_OPERAND (expr, 0),
1304 &TREE_OPERAND (expr, 1));
1308 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1309 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1310 return;
1313 case REALIGN_LOAD_EXPR:
1315 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1316 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1317 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
1318 return;
1321 case BLOCK:
1322 case FUNCTION_DECL:
1323 case EXC_PTR_EXPR:
1324 case FILTER_EXPR:
1325 case LABEL_DECL:
1326 /* Expressions that make no memory references. */
1327 return;
1329 default:
1330 if (class == tcc_unary)
1331 goto do_unary;
1332 if (class == tcc_binary || class == tcc_comparison)
1333 goto do_binary;
1334 if (class == tcc_constant || class == tcc_type)
1335 return;
1338 /* If we get here, something has gone wrong. */
1339 #ifdef ENABLE_CHECKING
1340 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
1341 debug_tree (expr);
1342 fputs ("\n", stderr);
1343 internal_error ("internal error");
1344 #endif
1345 gcc_unreachable ();
1349 /* Scan operands in the ASM_EXPR statement STMT. */
1351 static void
1352 get_asm_expr_operands (tree stmt)
1354 stmt_ann_t s_ann = stmt_ann (stmt);
1355 int noutputs = list_length (ASM_OUTPUTS (stmt));
1356 const char **oconstraints
1357 = (const char **) alloca ((noutputs) * sizeof (const char *));
1358 int i;
1359 tree link;
1360 const char *constraint;
1361 bool allows_mem, allows_reg, is_inout;
1363 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1365 oconstraints[i] = constraint
1366 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1367 parse_output_constraint (&constraint, i, 0, 0,
1368 &allows_mem, &allows_reg, &is_inout);
1370 /* This should have been split in gimplify_asm_expr. */
1371 gcc_assert (!allows_reg || !is_inout);
1373 /* Memory operands are addressable. Note that STMT needs the
1374 address of this operand. */
1375 if (!allows_reg && allows_mem)
1377 tree t = get_base_address (TREE_VALUE (link));
1378 if (t && DECL_P (t) && s_ann)
1379 add_to_addressable_set (t, &s_ann->addresses_taken);
1382 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1385 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1387 constraint
1388 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1389 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1390 oconstraints, &allows_mem, &allows_reg);
1392 /* Memory operands are addressable. Note that STMT needs the
1393 address of this operand. */
1394 if (!allows_reg && allows_mem)
1396 tree t = get_base_address (TREE_VALUE (link));
1397 if (t && DECL_P (t) && s_ann)
1398 add_to_addressable_set (t, &s_ann->addresses_taken);
1401 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1405 /* Clobber memory for asm ("" : : : "memory"); */
1406 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1407 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1409 unsigned i;
1410 bitmap_iterator bi;
1412 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1413 decided to group them). */
1414 if (global_var)
1415 add_stmt_operand (&global_var, s_ann, opf_is_def);
1416 else
1417 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1419 tree var = referenced_var (i);
1420 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1423 /* Now clobber all addressables. */
1424 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
1426 tree var = referenced_var (i);
1428 /* Subvars are explicitly represented in this list, so
1429 we don't need the original to be added to the clobber
1430 ops, but the original *will* be in this list because
1431 we keep the addressability of the original
1432 variable up-to-date so we don't screw up the rest of
1433 the backend. */
1434 if (var_can_have_subvars (var)
1435 && get_subvars_for_var (var) != NULL)
1436 continue;
1438 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1441 break;
1445 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1446 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. */
1448 static void
1449 get_indirect_ref_operands (tree stmt, tree expr, int flags)
1451 tree *pptr = &TREE_OPERAND (expr, 0);
1452 tree ptr = *pptr;
1453 stmt_ann_t s_ann = stmt_ann (stmt);
1455 /* Stores into INDIRECT_REF operands are never killing definitions. */
1456 flags &= ~opf_kill_def;
1458 if (SSA_VAR_P (ptr))
1460 struct ptr_info_def *pi = NULL;
1462 /* If PTR has flow-sensitive points-to information, use it. */
1463 if (TREE_CODE (ptr) == SSA_NAME
1464 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1465 && pi->name_mem_tag)
1467 /* PTR has its own memory tag. Use it. */
1468 add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
1470 else
1472 /* If PTR is not an SSA_NAME or it doesn't have a name
1473 tag, use its type memory tag. */
1474 var_ann_t v_ann;
1476 /* If we are emitting debugging dumps, display a warning if
1477 PTR is an SSA_NAME with no flow-sensitive alias
1478 information. That means that we may need to compute
1479 aliasing again. */
1480 if (dump_file
1481 && TREE_CODE (ptr) == SSA_NAME
1482 && pi == NULL)
1484 fprintf (dump_file,
1485 "NOTE: no flow-sensitive alias info for ");
1486 print_generic_expr (dump_file, ptr, dump_flags);
1487 fprintf (dump_file, " in ");
1488 print_generic_stmt (dump_file, stmt, dump_flags);
1491 if (TREE_CODE (ptr) == SSA_NAME)
1492 ptr = SSA_NAME_VAR (ptr);
1493 v_ann = var_ann (ptr);
1494 if (v_ann->type_mem_tag)
1495 add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
1499 /* If a constant is used as a pointer, we can't generate a real
1500 operand for it but we mark the statement volatile to prevent
1501 optimizations from messing things up. */
1502 else if (TREE_CODE (ptr) == INTEGER_CST)
1504 if (s_ann)
1505 s_ann->has_volatile_ops = true;
1506 return;
1509 /* Everything else *should* have been folded elsewhere, but users
1510 are smarter than we in finding ways to write invalid code. We
1511 cannot just assert here. If we were absolutely certain that we
1512 do handle all valid cases, then we could just do nothing here.
1513 That seems optimistic, so attempt to do something logical... */
1514 else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
1515 && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
1516 && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
1518 /* Make sure we know the object is addressable. */
1519 pptr = &TREE_OPERAND (ptr, 0);
1520 add_stmt_operand (pptr, s_ann, 0);
1522 /* Mark the object itself with a VUSE. */
1523 pptr = &TREE_OPERAND (*pptr, 0);
1524 get_expr_operands (stmt, pptr, flags);
1525 return;
1528 /* Ok, this isn't even is_gimple_min_invariant. Something's broken. */
1529 else
1530 gcc_unreachable ();
1532 /* Add a USE operand for the base pointer. */
1533 get_expr_operands (stmt, pptr, opf_none);
1536 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1538 static void
1539 get_tmr_operands (tree stmt, tree expr, int flags)
1541 tree tag = TMR_TAG (expr);
1543 /* First record the real operands. */
1544 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1545 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1547 /* MEM_REFs should never be killing. */
1548 flags &= ~opf_kill_def;
1550 if (TMR_SYMBOL (expr))
1552 stmt_ann_t ann = stmt_ann (stmt);
1553 add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1556 if (tag)
1557 get_expr_operands (stmt, &tag, flags);
1558 else
1559 /* Something weird, so ensure that we will be careful. */
1560 stmt_ann (stmt)->has_volatile_ops = true;
1563 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1565 static void
1566 get_call_expr_operands (tree stmt, tree expr)
1568 tree op;
1569 int call_flags = call_expr_flags (expr);
1571 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1572 operands for all the symbols that have been found to be
1573 call-clobbered.
1575 Note that if aliases have not been computed, the global effects
1576 of calls will not be included in the SSA web. This is fine
1577 because no optimizer should run before aliases have been
1578 computed. By not bothering with virtual operands for CALL_EXPRs
1579 we avoid adding superfluous virtual operands, which can be a
1580 significant compile time sink (See PR 15855). */
1581 if (aliases_computed_p
1582 && !bitmap_empty_p (call_clobbered_vars)
1583 && !(call_flags & ECF_NOVOPS))
1585 /* A 'pure' or a 'const' function never call-clobbers anything.
1586 A 'noreturn' function might, but since we don't return anyway
1587 there is no point in recording that. */
1588 if (TREE_SIDE_EFFECTS (expr)
1589 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1590 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1591 else if (!(call_flags & ECF_CONST))
1592 add_call_read_ops (stmt);
1595 /* Find uses in the called function. */
1596 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1598 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1599 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1601 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1606 /* Add *VAR_P to the appropriate operand array of the statement with
1607 annotation S_ANN. FLAGS is as in get_expr_operands. If *VAR_P is a
1608 GIMPLE register, it will be added to the statement's real operands,
1609 otherwise it is added to virtual operands. */
1611 static void
1612 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1614 bool is_real_op;
1615 tree var, sym;
1616 var_ann_t v_ann;
1618 var = *var_p;
1619 STRIP_NOPS (var);
1621 /* If the operand is an ADDR_EXPR, add its operand to the list of
1622 variables that have had their address taken in this statement. */
1623 if (TREE_CODE (var) == ADDR_EXPR && s_ann)
1625 add_to_addressable_set (TREE_OPERAND (var, 0), &s_ann->addresses_taken);
1626 return;
1629 /* If the original variable is not a scalar, it will be added to the list
1630 of virtual operands. In that case, use its base symbol as the virtual
1631 variable representing it. */
1632 is_real_op = is_gimple_reg (var);
1633 if (!is_real_op && !DECL_P (var))
1634 var = get_virtual_var (var);
1636 /* If VAR is not a variable that we care to optimize, do nothing. */
1637 if (var == NULL_TREE || !SSA_VAR_P (var))
1638 return;
1640 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1641 v_ann = var_ann (sym);
1643 /* Mark statements with volatile operands. Optimizers should back
1644 off from statements having volatile operands. */
1645 if (TREE_THIS_VOLATILE (sym) && s_ann)
1646 s_ann->has_volatile_ops = true;
1648 /* If the variable cannot be modified and this is a V_MAY_DEF change
1649 it into a VUSE. This happens when read-only variables are marked
1650 call-clobbered and/or aliased to writable variables. So we only
1651 make this change for non-specific stores.
1653 Note that if this is a specific store, i.e. associated with a
1654 modify_expr, then we can't suppress the V_DEF, lest we run into
1655 validation problems.
1657 This can happen when programs cast away const, leaving us with a
1658 store to read-only memory. If the statement is actually executed
1659 at runtime, then the program is ill formed. If the statement is
1660 not executed then all is well. At the very least, we cannot ICE. */
1661 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1663 gcc_assert (!is_real_op);
1664 flags &= ~(opf_is_def | opf_kill_def);
1667 if (is_real_op)
1669 /* The variable is a GIMPLE register. Add it to real operands. */
1670 if (flags & opf_is_def)
1671 append_def (var_p);
1672 else
1673 append_use (var_p);
1675 else
1677 varray_type aliases;
1679 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1680 virtual operands, unless the caller has specifically requested
1681 not to add virtual operands (used when adding operands inside an
1682 ADDR_EXPR expression). */
1683 if (flags & opf_no_vops)
1684 return;
1686 aliases = v_ann->may_aliases;
1688 if (aliases == NULL)
1690 /* The variable is not aliased or it is an alias tag. */
1691 if (flags & opf_is_def)
1693 if (flags & opf_kill_def)
1695 /* Only regular variables or struct fields may get a
1696 V_MUST_DEF operand. */
1697 gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG
1698 || v_ann->mem_tag_kind == STRUCT_FIELD);
1699 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1700 variable definitions. */
1701 append_v_must_def (var);
1703 else
1705 /* Add a V_MAY_DEF for call-clobbered variables and
1706 memory tags. */
1707 append_v_may_def (var);
1710 else
1712 append_vuse (var);
1713 if (s_ann && v_ann->is_alias_tag)
1714 s_ann->makes_aliased_loads = 1;
1717 else
1719 size_t i;
1721 /* The variable is aliased. Add its aliases to the virtual
1722 operands. */
1723 gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);
1725 if (flags & opf_is_def)
1727 /* If the variable is also an alias tag, add a virtual
1728 operand for it, otherwise we will miss representing
1729 references to the members of the variable's alias set.
1730 This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
1731 if (v_ann->is_alias_tag)
1732 append_v_may_def (var);
1734 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1735 append_v_may_def (VARRAY_TREE (aliases, i));
1737 if (s_ann)
1738 s_ann->makes_aliased_stores = 1;
1740 else
1742 /* Similarly, append a virtual use for VAR itself, when
1743 it is an alias tag. */
1744 if (v_ann->is_alias_tag)
1745 append_vuse (var);
1747 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1748 append_vuse (VARRAY_TREE (aliases, i));
1750 if (s_ann)
1751 s_ann->makes_aliased_loads = 1;
1758 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
1759 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
1760 a single variable whose address has been taken or any other valid
1761 GIMPLE memory reference (structure reference, array, etc). If the
1762 base address of REF is a decl that has sub-variables, also add all
1763 of its sub-variables. */
1765 void
1766 add_to_addressable_set (tree ref, bitmap *addresses_taken)
1768 tree var;
1769 subvar_t svars;
1771 gcc_assert (addresses_taken);
1773 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
1774 as the only thing we take the address of. If VAR is a structure,
1775 taking the address of a field means that the whole structure may
1776 be referenced using pointer arithmetic. See PR 21407 and the
1777 ensuing mailing list discussion. */
1778 var = get_base_address (ref);
1779 if (var && SSA_VAR_P (var))
1781 if (*addresses_taken == NULL)
1782 *addresses_taken = BITMAP_GGC_ALLOC ();
1784 if (var_can_have_subvars (var)
1785 && (svars = get_subvars_for_var (var)))
1787 subvar_t sv;
1788 for (sv = svars; sv; sv = sv->next)
1790 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
1791 TREE_ADDRESSABLE (sv->var) = 1;
1794 else
1796 bitmap_set_bit (*addresses_taken, DECL_UID (var));
1797 TREE_ADDRESSABLE (var) = 1;
1803 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1804 clobbered variables in the function. */
1806 static void
1807 add_call_clobber_ops (tree stmt, tree callee)
1809 unsigned u;
1810 tree t;
1811 bitmap_iterator bi;
1812 stmt_ann_t s_ann = stmt_ann (stmt);
1813 struct stmt_ann_d empty_ann;
1814 bitmap not_read_b, not_written_b;
1816 /* Functions that are neither const, pure, nor noreturn may clobber
1817 call-clobbered variables. */
1818 if (s_ann)
1819 s_ann->makes_clobbering_call = true;
1821 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1822 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1823 if (global_var)
1825 add_stmt_operand (&global_var, s_ann, opf_is_def);
1826 return;
1829 /* FIXME - if we have better information from the static vars
1830 analysis, we need to make the cache call site specific. This way
1831 we can have the performance benefits even if we are doing good
1832 optimization. */
1834 /* Get info for local and module level statics. There is a bit
1835 set for each static if the call being processed does not read
1836 or write that variable. */
1838 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1839 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1841 /* If cache is valid, copy the elements into the build vectors. */
1842 if (ssa_call_clobbered_cache_valid
1843 && (!not_read_b || bitmap_empty_p (not_read_b))
1844 && (!not_written_b || bitmap_empty_p (not_written_b)))
1846 for (u = 0 ; u < VEC_length (tree, clobbered_vuses); u++)
1848 t = VEC_index (tree, clobbered_vuses, u);
1849 gcc_assert (TREE_CODE (t) != SSA_NAME);
1850 var_ann (t)->in_vuse_list = 1;
1851 VEC_safe_push (tree, heap, build_vuses, (tree)t);
1853 for (u = 0; u < VEC_length (tree, clobbered_v_may_defs); u++)
1855 t = VEC_index (tree, clobbered_v_may_defs, u);
1856 gcc_assert (TREE_CODE (t) != SSA_NAME);
1857 var_ann (t)->in_v_may_def_list = 1;
1858 VEC_safe_push (tree, heap, build_v_may_defs, (tree)t);
1860 if (s_ann)
1862 s_ann->makes_aliased_loads = clobbered_aliased_loads;
1863 s_ann->makes_aliased_stores = clobbered_aliased_stores;
1865 return;
1868 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
1870 /* Add a V_MAY_DEF operand for every call clobbered variable. */
1871 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1873 tree var = referenced_var (u);
1874 if (unmodifiable_var_p (var))
1875 add_stmt_operand (&var, &empty_ann, opf_none);
1876 else
1878 bool not_read
1879 = not_read_b ? bitmap_bit_p (not_read_b, u) : false;
1880 bool not_written
1881 = not_written_b ? bitmap_bit_p (not_written_b, u) : false;
1883 if ((TREE_READONLY (var)
1884 && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
1885 || not_written)
1887 if (!not_read)
1888 add_stmt_operand (&var, &empty_ann, opf_none);
1890 else
1891 add_stmt_operand (&var, &empty_ann, opf_is_def);
1895 if ((!not_read_b || bitmap_empty_p (not_read_b))
1896 && (!not_written_b || bitmap_empty_p (not_written_b)))
1898 clobbered_aliased_loads = empty_ann.makes_aliased_loads;
1899 clobbered_aliased_stores = empty_ann.makes_aliased_stores;
1901 /* Set the flags for a stmt's annotation. */
1902 if (s_ann)
1904 s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
1905 s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
1908 /* Prepare empty cache vectors. */
1909 VEC_truncate (tree, clobbered_vuses, 0);
1910 VEC_truncate (tree, clobbered_v_may_defs, 0);
1912 /* Now fill the clobbered cache with the values that have been found. */
1913 for (u = 0; u < VEC_length (tree, build_vuses); u++)
1914 VEC_safe_push (tree, heap, clobbered_vuses,
1915 VEC_index (tree, build_vuses, u));
1917 gcc_assert (VEC_length (tree, build_vuses)
1918 == VEC_length (tree, clobbered_vuses));
1920 for (u = 0; u < VEC_length (tree, build_v_may_defs); u++)
1921 VEC_safe_push (tree, heap, clobbered_v_may_defs,
1922 VEC_index (tree, build_v_may_defs, u));
1924 gcc_assert (VEC_length (tree, build_v_may_defs)
1925 == VEC_length (tree, clobbered_v_may_defs));
1927 ssa_call_clobbered_cache_valid = true;
1932 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1933 function. */
1935 static void
1936 add_call_read_ops (tree stmt)
1938 unsigned u;
1939 tree t;
1940 bitmap_iterator bi;
1941 stmt_ann_t s_ann = stmt_ann (stmt);
1942 struct stmt_ann_d empty_ann;
1944 /* If the function is not pure, it may reference memory. Add
1945 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1946 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1947 if (global_var)
1949 add_stmt_operand (&global_var, s_ann, opf_none);
1950 return;
1953 /* If cache is valid, copy the elements into the build vector. */
1954 if (ssa_ro_call_cache_valid)
1956 for (u = 0; u < VEC_length (tree, ro_call_vuses); u++)
1958 t = VEC_index (tree, ro_call_vuses, u);
1959 gcc_assert (TREE_CODE (t) != SSA_NAME);
1960 var_ann (t)->in_vuse_list = 1;
1961 VEC_safe_push (tree, heap, build_vuses, (tree)t);
1963 if (s_ann)
1964 s_ann->makes_aliased_loads = ro_call_aliased_loads;
1965 return;
1968 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
1970 /* Add a VUSE for each call-clobbered variable. */
1971 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1973 tree var = referenced_var (u);
1974 add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
1977 ro_call_aliased_loads = empty_ann.makes_aliased_loads;
1978 if (s_ann)
1979 s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
1981 /* Prepare empty cache vectors. */
1982 VEC_truncate (tree, ro_call_vuses, 0);
1984 /* Now fill the read-only call cache with the values that have been found. */
1985 for (u = 0; u < VEC_length (tree, build_vuses); u++)
1986 VEC_safe_push (tree, heap, ro_call_vuses,
1987 VEC_index (tree, build_vuses, u));
1989 gcc_assert (VEC_length (tree, build_vuses)
1990 == VEC_length (tree, ro_call_vuses));
1992 ssa_ro_call_cache_valid = true;
1996 /* Scan the immediate_use list for VAR, making sure it is linked properly.
1997 Return TRUE if there is a problem. */
1999 bool
2000 verify_imm_links (FILE *f, tree var)
2002 use_operand_p ptr, prev, list;
2003 int count;
2005 gcc_assert (TREE_CODE (var) == SSA_NAME);
2007 list = &(SSA_NAME_IMM_USE_NODE (var));
2008 gcc_assert (list->use == NULL);
2010 if (list->prev == NULL)
2012 gcc_assert (list->next == NULL);
2013 return false;
2016 prev = list;
2017 count = 0;
2018 for (ptr = list->next; ptr != list; )
2020 if (prev != ptr->prev)
2021 goto error;
2023 if (ptr->use == NULL)
2024 goto error; /* 2 roots, or SAFE guard node. */
2025 else if (*(ptr->use) != var)
2026 goto error;
2028 prev = ptr;
2029 ptr = ptr->next;
2030 /* Avoid infinite loops. */
2031 if (count++ > 30000)
2032 goto error;
2035 /* Verify list in the other direction. */
2036 prev = list;
2037 for (ptr = list->prev; ptr != list; )
2039 if (prev != ptr->next)
2040 goto error;
2041 prev = ptr;
2042 ptr = ptr->prev;
2043 if (count-- < 0)
2044 goto error;
2047 if (count != 0)
2048 goto error;
2050 return false;
2052 error:
2053 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2055 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2056 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2058 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2059 (void *)ptr->use);
2060 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2061 fprintf(f, "\n");
2062 return true;
2066 /* Dump all the immediate uses of VAR to FILE. */
2068 void
2069 dump_immediate_uses_for (FILE *file, tree var)
2071 imm_use_iterator iter;
2072 use_operand_p use_p;
2074 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2076 print_generic_expr (file, var, TDF_SLIM);
2077 fprintf (file, " : -->");
2078 if (has_zero_uses (var))
2079 fprintf (file, " no uses.\n");
2080 else
2081 if (has_single_use (var))
2082 fprintf (file, " single use.\n");
2083 else
2084 fprintf (file, "%d uses.\n", num_imm_uses (var));
2086 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2088 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2089 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2090 else
2091 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2093 fprintf(file, "\n");
2096 /* Dump all the immediate uses to FILE. */
2098 void
2099 dump_immediate_uses (FILE *file)
2101 tree var;
2102 unsigned int x;
2104 fprintf (file, "Immediate_uses: \n\n");
2105 for (x = 1; x < num_ssa_names; x++)
2107 var = ssa_name(x);
2108 if (!var)
2109 continue;
2110 dump_immediate_uses_for (file, var);
2115 /* Dump def-use edges on stderr. */
2117 void
2118 debug_immediate_uses (void)
2120 dump_immediate_uses (stderr);
2123 /* Dump def-use edges for VAR on stderr. */
2125 void
2126 debug_immediate_uses_for (tree var)
2128 dump_immediate_uses_for (stderr, var);
2130 #include "gt-tree-ssa-operands.h"