/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "errors.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"

#include "langhooks.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines, which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
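
/* For illustration only (a sketch, not part of the original file): once
   the cache has been built, clients walk it with the iteration macros
   declared in tree-ssa-operands.h.  Assuming the FOR_EACH_SSA_USE_OPERAND
   macro and the SSA_OP_USE filter from that header, a pass might visit
   the real uses of a stmt like so:

     use_operand_p use_p;
     ssa_op_iter iter;

     FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
       process (USE_FROM_PTR (use_p));

   where process () is a hypothetical callback.  Because real operands are
   pointers into the stmt tree, writing through use_p with SET_USE would
   change the stmt itself; virtual operands live only in this cache.  */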
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none 0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def (1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def (1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 2)

/* Array for building all the def operands.  */
static GTY (()) varray_type build_defs;

/* Array for building all the use operands.  */
static GTY (()) varray_type build_uses;

/* Array for building all the v_may_def operands.  */
static GTY (()) varray_type build_v_may_defs;

/* Array for building all the vuse operands.  */
static GTY (()) varray_type build_vuses;

/* Array for building all the v_must_def operands.  */
static GTY (()) varray_type build_v_must_defs;

/* True if the operands for call clobbered vars are cached and valid.  */
bool ssa_call_clobbered_cache_valid;
bool ssa_ro_call_cache_valid;

/* These arrays are the cached operand vectors for call clobbered calls.  */
static GTY (()) varray_type clobbered_v_may_defs;
static GTY (()) varray_type clobbered_vuses;
static GTY (()) varray_type ro_call_vuses;
static bool clobbered_aliased_loads;
static bool clobbered_aliased_stores;
static bool ro_call_aliased_loads;
static stmt_operands_p parse_old_ops = NULL;

def_operand_p NULL_DEF_OPERAND_P = { NULL };

static void note_addressable (tree, stmt_ann_t);
static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
static void append_v_may_def (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree);
static void add_call_read_ops (tree);
static void add_stmt_operand (tree *, stmt_ann_t, int);
/* Return a vector of contiguous memory for NUM def operands.  */

static inline def_optype
allocate_def_optype (unsigned num)
{
  def_optype def_ops;
  unsigned size;
  size = sizeof (struct def_optype_d) + sizeof (tree *) * (num - 1);
  def_ops = ggc_alloc (size);
  def_ops->num_defs = num;
  return def_ops;
}


/* Return a vector of contiguous memory for NUM use operands.  */

static inline use_optype
allocate_use_optype (unsigned num)
{
  use_optype use_ops;
  unsigned size;
  size = sizeof (struct use_optype_d) + sizeof (use_operand_type_t) * (num - 1);
  use_ops = ggc_alloc (size);
  use_ops->num_uses = num;
  return use_ops;
}


/* Return a vector of contiguous memory for NUM v_may_def operands.  */

static inline v_may_def_optype
allocate_v_may_def_optype (unsigned num)
{
  v_may_def_optype v_may_def_ops;
  unsigned size;
  size = sizeof (struct v_may_def_optype_d)
	 + sizeof (v_def_use_operand_type_t) * (num - 1);
  v_may_def_ops = ggc_alloc (size);
  v_may_def_ops->num_v_may_defs = num;
  return v_may_def_ops;
}


/* Return a vector of contiguous memory for NUM v_use operands.  */

static inline vuse_optype
allocate_vuse_optype (unsigned num)
{
  vuse_optype vuse_ops;
  unsigned size;
  size = sizeof (struct vuse_optype_d)
	 + sizeof (vuse_operand_type_t) * (num - 1);
  vuse_ops = ggc_alloc (size);
  vuse_ops->num_vuses = num;
  return vuse_ops;
}


/* Return a vector of contiguous memory for NUM v_must_def operands.  */

static inline v_must_def_optype
allocate_v_must_def_optype (unsigned num)
{
  v_must_def_optype v_must_def_ops;
  unsigned size;
  size = sizeof (struct v_must_def_optype_d)
	 + sizeof (v_def_use_operand_type_t) * (num - 1);
  v_must_def_ops = ggc_alloc (size);
  v_must_def_ops->num_v_must_defs = num;
  return v_must_def_ops;
}
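
/* A note on the sizing arithmetic above (illustration only, not part of
   the original file): each of these *_optype_d structs ends in a
   one-element trailing array, so a vector for NUM operands needs the
   struct itself plus NUM - 1 extra array elements.  Sketched with a
   hypothetical struct of the same shape:

     struct opvec { unsigned num; tree elems[1]; };

     size = sizeof (struct opvec) + sizeof (tree) * (num - 1);
     v = ggc_alloc (size);    <- room for v->elems[0 .. num-1]

   This is the classic C89 "struct hack".  The memory is GC-allocated,
   so it is released with ggc_free () or by the collector, never with
   free ().  */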
/* Free memory for USES.  */

static inline void
free_uses (use_optype *uses)
{
  if (*uses)
    {
      unsigned int x;
      use_optype use = *uses;
      for (x = 0; x < use->num_uses; x++)
	delink_imm_use (&(use->uses[x]));
      ggc_free (*uses);
      *uses = NULL;
    }
}


/* Free memory for DEFS.  */

static inline void
free_defs (def_optype *defs)
{
  if (*defs)
    {
      ggc_free (*defs);
      *defs = NULL;
    }
}


/* Free memory for VUSES.  */

static inline void
free_vuses (vuse_optype *vuses)
{
  if (*vuses)
    {
      unsigned int x;
      vuse_optype vuse = *vuses;
      for (x = 0; x < vuse->num_vuses; x++)
	delink_imm_use (&(vuse->vuses[x].imm_use));
      ggc_free (*vuses);
      *vuses = NULL;
    }
}


/* Free memory for V_MAY_DEFS.  */

static inline void
free_v_may_defs (v_may_def_optype *v_may_defs)
{
  if (*v_may_defs)
    {
      unsigned int x;
      v_may_def_optype v_may_def = *v_may_defs;
      for (x = 0; x < v_may_def->num_v_may_defs; x++)
	delink_imm_use (&(v_may_def->v_may_defs[x].imm_use));
      ggc_free (*v_may_defs);
      *v_may_defs = NULL;
    }
}


/* Free memory for V_MUST_DEFS.  */

static inline void
free_v_must_defs (v_must_def_optype *v_must_defs)
{
  if (*v_must_defs)
    {
      unsigned int x;
      v_must_def_optype v_must_def = *v_must_defs;
      for (x = 0; x < v_must_def->num_v_must_defs; x++)
	delink_imm_use (&(v_must_def->v_must_defs[x].imm_use));
      ggc_free (*v_must_defs);
      *v_must_defs = NULL;
    }
}
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  VARRAY_TREE_PTR_INIT (build_defs, 5, "build defs");
  VARRAY_TREE_PTR_INIT (build_uses, 10, "build uses");
  VARRAY_TREE_INIT (build_v_may_defs, 10, "build v_may_defs");
  VARRAY_TREE_INIT (build_vuses, 10, "build vuses");
  VARRAY_TREE_INIT (build_v_must_defs, 10, "build v_must_defs");
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  ggc_free (build_defs);
  ggc_free (build_uses);
  ggc_free (build_v_may_defs);
  ggc_free (build_vuses);
  ggc_free (build_v_must_defs);
  build_defs = NULL;
  build_uses = NULL;
  build_v_may_defs = NULL;
  build_vuses = NULL;
  build_v_must_defs = NULL;
  if (clobbered_v_may_defs)
    {
      ggc_free (clobbered_v_may_defs);
      ggc_free (clobbered_vuses);
      clobbered_v_may_defs = NULL;
      clobbered_vuses = NULL;
    }
  if (ro_call_vuses)
    {
      ggc_free (ro_call_vuses);
      ro_call_vuses = NULL;
    }
}
/* Initialize VUSES index INDEX to VAL for STMT.  If OLD is present, preserve
   the position of the vuse in the immediate_use list.  */

static inline void
initialize_vuse_operand (vuse_optype vuses, unsigned int index, tree val,
			 tree stmt, ssa_imm_use_t *old)
{
  vuse_operand_type_t *ptr;
  ptr = &(vuses->vuses[index]);
  ptr->use = val;
  ptr->imm_use.use = &(ptr->use);
  if (old)
    relink_imm_use_stmt (&(ptr->imm_use), old, stmt);
  else
    link_imm_use_stmt (&(ptr->imm_use), ptr->use, stmt);
}


/* Initialize V_MAY_DEF_OPS index X to be DEF = MAY_DEF <USE> for STMT.  If
   OLD is present, preserve the position of the may-def in the immediate_use
   list.  */

static inline void
initialize_v_may_def_operand (v_may_def_optype v_may_def_ops, unsigned int x,
			      tree def, tree use, tree stmt,
			      ssa_imm_use_t *old)
{
  v_def_use_operand_type_t *ptr;
  ptr = &(v_may_def_ops->v_may_defs[x]);
  ptr->def = def;
  ptr->use = use;
  ptr->imm_use.use = &(ptr->use);
  if (old)
    relink_imm_use_stmt (&(ptr->imm_use), old, stmt);
  else
    link_imm_use_stmt (&(ptr->imm_use), ptr->use, stmt);
}


/* Initialize V_MUST_DEF_OPS index X to be DEF = MUST_DEF <USE> for STMT.  If
   OLD is present, preserve the position of the must-def in the immediate_use
   list.  */

static inline void
initialize_v_must_def_operand (v_must_def_optype v_must_def_ops, unsigned int x,
			       tree def, tree use, tree stmt,
			       ssa_imm_use_t *old)
{
  v_def_use_operand_type_t *ptr;
  ptr = &(v_must_def_ops->v_must_defs[x]);
  ptr->def = def;
  ptr->use = use;
  ptr->imm_use.use = &(ptr->use);
  if (old)
    relink_imm_use_stmt (&(ptr->imm_use), old, stmt);
  else
    link_imm_use_stmt (&(ptr->imm_use), ptr->use, stmt);
}
/* All the finalize_ssa_* routines do the work required to turn the build_
   VARRAY into an operand_vector of the appropriate type.  The original
   vector, if any, is passed in for comparison and virtual SSA_NAME reuse.
   If the old vector is reused, the pointer passed in is set to NULL so that
   the memory is not freed when the old operands are freed.  */

/* Return a new def operand vector for STMT, comparing to OLD_OPS_P.  */

static def_optype
finalize_ssa_defs (def_optype *old_ops_p, tree stmt)
{
  unsigned num, x;
  def_optype def_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_defs);
  if (num == 0)
    return NULL;

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);

  old_ops = *old_ops_p;

  /* Compare old vector and new array.  */
  build_diff = true;
  if (stmt && old_ops && old_ops->num_defs == num)
    {
      build_diff = false;
      for (x = 0; x < num; x++)
	if (old_ops->defs[x].def != VARRAY_TREE_PTR (build_defs, x))
	  {
	    build_diff = true;
	    break;
	  }
    }

  if (!build_diff)
    {
      def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      def_ops = allocate_def_optype (num);
      for (x = 0; x < num; x++)
	def_ops->defs[x].def = VARRAY_TREE_PTR (build_defs, x);
    }

  VARRAY_POP_ALL (build_defs);

  return def_ops;
}
/* Make sure PTR is in the correct immediate use list.  Since uses are simply
   pointers into the stmt TREE, there is no way of telling if anyone has
   changed what this pointer points to via TREE_OPERAND (exp, 0) = <...>.
   The contents are different, but the pointer is still the same.  This
   routine will check to make sure PTR is in the correct list, and if it
   isn't, put it in the correct list.  We cannot simply check the previous
   node because all nodes in the same stmt might have been changed.  */

static inline void
correct_use_link (ssa_imm_use_t *ptr, tree stmt)
{
  ssa_imm_use_t *prev;
  tree root;

  /* fold_stmt () may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  prev = ptr->prev;
  if (prev)
    {
      bool stmt_mod = true;
      /* Find the first element which isn't a SAFE iterator, is in a
	 different stmt, and is not a modified stmt.  That node is in the
	 correct list; see if we are too.  */
      while (stmt_mod)
	{
	  while (prev->stmt == stmt || prev->stmt == NULL)
	    prev = prev->prev;
	  if (prev->use == NULL)
	    stmt_mod = false;
	  else
	    if ((stmt_mod = stmt_modified_p (prev->stmt)))
	      prev = prev->prev;
	}

      /* Get the ssa_name of the list the node is in.  */
      if (prev->use == NULL)
	root = prev->stmt;
      else
	root = *(prev->use);
      /* If it's the right list, simply return.  */
      if (root == *(ptr->use))
	return;
    }
  /* It's in the wrong list if we reach here.  */
  delink_imm_use (ptr);
  link_imm_use (ptr, *(ptr->use));
}
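
/* Illustration only (a sketch, not part of the original file): the
   situation this routine guards against arises when a pass rewrites an
   operand slot in place, e.g.

     stmt:  x_3 = y_4 + z_5;
     TREE_OPERAND (rhs, 0) = w_7;    <- y_4's use slot now holds w_7

   The use node still sits on y_4's immediate-use list even though *use
   now points at w_7, so correct_use_link () delinks it and relinks it
   onto w_7's list.  The names x_3, y_4, z_5 and w_7 are made up for the
   example.  */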
/* Return a new use operand vector for STMT, comparing to OLD_OPS_P.  */

static use_optype
finalize_ssa_uses (use_optype *old_ops_p, tree stmt)
{
  unsigned num, x, num_old, i;
  use_optype use_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_uses);
  if (num == 0)
    return NULL;

#ifdef ENABLE_CHECKING
  {
    unsigned x;
    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable.  */
    for (x = 0; x < num; x++)
      gcc_assert (*(VARRAY_TREE_PTR (build_uses, x)) != stmt);
  }
#endif

  old_ops = *old_ops_p;
  num_old = ((stmt && old_ops) ? old_ops->num_uses : 0);

  /* Check if the old vector and the new array are the same.  */
  build_diff = true;
  if (stmt && old_ops && num_old == num)
    {
      build_diff = false;
      for (x = 0; x < num; x++)
	{
	  tree *var_p = VARRAY_TREE_PTR (build_uses, x);
	  tree *node = old_ops->uses[x].use;
	  /* Check the pointer values to see if they are the same.  */
	  if (node != var_p)
	    {
	      build_diff = true;
	      break;
	    }
	}
    }

  if (!build_diff)
    {
      use_ops = old_ops;
      *old_ops_p = NULL;
      for (i = 0; i < num_old; i++)
	correct_use_link (&(use_ops->uses[i]), stmt);
    }
  else
    {
      use_ops = allocate_use_optype (num);
      for (x = 0; x < num; x++)
	{
	  tree *var = VARRAY_TREE_PTR (build_uses, x);
	  use_ops->uses[x].use = var;
	  for (i = 0; i < num_old; i++)
	    {
	      ssa_imm_use_t *ptr = &(old_ops->uses[i]);
	      if (ptr->use == var)
		{
		  relink_imm_use_stmt (&(use_ops->uses[x]), ptr, stmt);
		  correct_use_link (&(use_ops->uses[x]), stmt);
		  break;
		}
	    }
	  if (i == num_old)
	    link_imm_use_stmt (&(use_ops->uses[x]), *var, stmt);
	}
    }

  VARRAY_POP_ALL (build_uses);

  return use_ops;
}
/* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P.  */

static v_may_def_optype
finalize_ssa_v_may_defs (v_may_def_optype *old_ops_p, tree stmt)
{
  unsigned num, x, i, old_num;
  v_may_def_optype v_may_def_ops, old_ops;
  tree result, var;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
  if (num == 0)
    return NULL;

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  build_diff = true;
  if (stmt && old_ops && old_ops->num_v_may_defs == num)
    {
      old_num = num;
      build_diff = false;
      for (x = 0; x < num; x++)
	{
	  var = old_ops->v_may_defs[x].def;
	  if (TREE_CODE (var) == SSA_NAME)
	    var = SSA_NAME_VAR (var);
	  if (var != VARRAY_TREE (build_v_may_defs, x))
	    {
	      build_diff = true;
	      break;
	    }
	}
    }
  else
    old_num = (old_ops ? old_ops->num_v_may_defs : 0);

  if (!build_diff)
    {
      v_may_def_ops = old_ops;
      *old_ops_p = NULL;
      for (x = 0; x < num; x++)
	correct_use_link (&(v_may_def_ops->v_may_defs[x].imm_use), stmt);
    }
  else
    {
      v_may_def_ops = allocate_v_may_def_optype (num);
      for (x = 0; x < num; x++)
	{
	  var = VARRAY_TREE (build_v_may_defs, x);
	  /* Look for VAR in the old operands vector.  */
	  for (i = 0; i < old_num; i++)
	    {
	      result = old_ops->v_may_defs[i].def;
	      if (TREE_CODE (result) == SSA_NAME)
		result = SSA_NAME_VAR (result);
	      if (result == var)
		{
		  initialize_v_may_def_operand (v_may_def_ops, x,
						old_ops->v_may_defs[i].def,
						old_ops->v_may_defs[i].use,
						stmt,
						&(old_ops->v_may_defs[i].imm_use));
		  break;
		}
	    }
	  if (i == old_num)
	    initialize_v_may_def_operand (v_may_def_ops, x, var, var, stmt,
					  NULL);
	}
    }

  /* Empty the V_MAY_DEF build vector after VUSES have been processed.  */

  return v_may_def_ops;
}
/* Clear the in_list bits and empty the build array for v_may_defs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = VARRAY_ACTIVE_SIZE (build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = VARRAY_TREE (build_v_may_defs, x);
      var_ann_t ann = var_ann (t);
      ann->in_v_may_def_list = 0;
    }
  VARRAY_POP_ALL (build_v_may_defs);
}
/* Return a new vuse operand vector, comparing to OLD_OPS_P.  */

static vuse_optype
finalize_ssa_vuses (vuse_optype *old_ops_p, tree stmt)
{
  unsigned num, x, i, num_v_may_defs, old_num;
  vuse_optype vuse_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  if (num == 0)
    {
      cleanup_v_may_defs ();
      return NULL;
    }

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  num_v_may_defs = VARRAY_ACTIVE_SIZE (build_v_may_defs);

  if (num_v_may_defs > 0)
    {
      size_t i;
      tree vuse;
      for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
	{
	  vuse = VARRAY_TREE (build_vuses, i);
	  if (TREE_CODE (vuse) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (vuse);
	      ann->in_vuse_list = 0;
	      if (ann->in_v_may_def_list)
		{
		  /* If we found a useless VUSE operand, remove it from the
		     operand array by replacing it with the last active
		     element in the operand array (unless the useless VUSE
		     was the last operand, in which case we simply remove
		     it).  */
		  if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
		    {
		      VARRAY_TREE (build_vuses, i)
			= VARRAY_TREE (build_vuses,
				       VARRAY_ACTIVE_SIZE (build_vuses) - 1);
		    }
		  VARRAY_POP (build_vuses);

		  /* We want to rescan the element at this index, unless
		     this was the last element, in which case the loop
		     terminates.  */
		  i--;
		}
	    }
	}
    }
  else
    /* Clear out the in_list bits.  */
    for (x = 0; x < num; x++)
      {
	tree t = VARRAY_TREE (build_vuses, x);
	if (TREE_CODE (t) != SSA_NAME)
	  {
	    var_ann_t ann = var_ann (t);
	    ann->in_vuse_list = 0;
	  }
      }

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  /* We could have reduced the size to zero now, however.  */
  if (num == 0)
    {
      cleanup_v_may_defs ();
      return NULL;
    }

  old_ops = *old_ops_p;

  /* Determine whether vuses is the same as the old vector.  */
  build_diff = true;
  if (stmt && old_ops && old_ops->num_vuses == num)
    {
      old_num = num;
      build_diff = false;
      for (x = 0; x < num; x++)
	{
	  tree v;
	  v = old_ops->vuses[x].use;
	  if (TREE_CODE (v) == SSA_NAME)
	    v = SSA_NAME_VAR (v);
	  if (v != VARRAY_TREE (build_vuses, x))
	    {
	      build_diff = true;
	      break;
	    }
	}
    }
  else
    old_num = (old_ops ? old_ops->num_vuses : 0);

  if (!build_diff)
    {
      vuse_ops = old_ops;
      *old_ops_p = NULL;
      for (x = 0; x < num; x++)
	correct_use_link (&(vuse_ops->vuses[x].imm_use), stmt);
    }
  else
    {
      vuse_ops = allocate_vuse_optype (num);
      for (x = 0; x < num; x++)
	{
	  tree result, var = VARRAY_TREE (build_vuses, x);
	  /* Look for VAR in the old vector, and use that SSA_NAME.  */
	  for (i = 0; i < old_num; i++)
	    {
	      result = old_ops->vuses[i].use;
	      if (TREE_CODE (result) == SSA_NAME)
		result = SSA_NAME_VAR (result);
	      if (result == var)
		{
		  initialize_vuse_operand (vuse_ops, x, old_ops->vuses[i].use,
					   stmt, &(old_ops->vuses[i].imm_use));
		  break;
		}
	    }
	  if (i == old_num)
	    initialize_vuse_operand (vuse_ops, x, var, stmt, NULL);
	}
    }

  /* The v_may_def build vector wasn't freed because we needed it here.
     Free it now with the vuses build vector.  */
  VARRAY_POP_ALL (build_vuses);
  cleanup_v_may_defs ();

  return vuse_ops;
}
/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P.  */

static v_must_def_optype
finalize_ssa_v_must_defs (v_must_def_optype *old_ops_p, tree stmt)
{
  unsigned num, x, i, old_num = 0;
  v_must_def_optype v_must_def_ops, old_ops;
  tree result, var;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_v_must_defs);
  if (num == 0)
    return NULL;

  /* In the presence of subvars, there may be more than one V_MUST_DEF per
     statement (one for each subvar).  It is a bit expensive to verify that
     all must-defs in a statement belong to subvars if there is more than one
     must-def, so we don't do it.  Suffice to say, if you reach here without
     having subvars, and have num > 1, you have hit a bug.  */

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  build_diff = true;
  if (stmt && old_ops && old_ops->num_v_must_defs == num)
    {
      old_num = num;
      build_diff = false;
      for (x = 0; x < num; x++)
	{
	  tree var = old_ops->v_must_defs[x].def;
	  if (TREE_CODE (var) == SSA_NAME)
	    var = SSA_NAME_VAR (var);
	  if (var != VARRAY_TREE (build_v_must_defs, x))
	    {
	      build_diff = true;
	      break;
	    }
	}
    }
  else
    old_num = (old_ops ? old_ops->num_v_must_defs : 0);

  if (!build_diff)
    {
      v_must_def_ops = old_ops;
      *old_ops_p = NULL;
      for (x = 0; x < num; x++)
	correct_use_link (&(v_must_def_ops->v_must_defs[x].imm_use), stmt);
    }
  else
    {
      v_must_def_ops = allocate_v_must_def_optype (num);
      for (x = 0; x < num; x++)
	{
	  var = VARRAY_TREE (build_v_must_defs, x);
	  /* Look for VAR in the original vector.  */
	  for (i = 0; i < old_num; i++)
	    {
	      result = old_ops->v_must_defs[i].def;
	      if (TREE_CODE (result) == SSA_NAME)
		result = SSA_NAME_VAR (result);
	      if (result == var)
		{
		  initialize_v_must_def_operand (v_must_def_ops, x,
						 old_ops->v_must_defs[i].def,
						 old_ops->v_must_defs[i].use,
						 stmt,
						 &(old_ops->v_must_defs[i].imm_use));
		  break;
		}
	    }
	  if (i == old_num)
	    initialize_v_must_def_operand (v_must_def_ops, x, var, var, stmt,
					   NULL);
	}
    }

  VARRAY_POP_ALL (build_v_must_defs);

  return v_must_def_ops;
}
/* Finalize all the build vectors, filling the new ones into NEW_OPS.  */

static inline void
finalize_ssa_stmt_operands (tree stmt, stmt_operands_p old_ops,
			    stmt_operands_p new_ops)
{
  new_ops->def_ops = finalize_ssa_defs (&(old_ops->def_ops), stmt);
  new_ops->use_ops = finalize_ssa_uses (&(old_ops->use_ops), stmt);
  new_ops->v_must_def_ops
    = finalize_ssa_v_must_defs (&(old_ops->v_must_def_ops), stmt);
  new_ops->v_may_def_ops
    = finalize_ssa_v_may_defs (&(old_ops->v_may_def_ops), stmt);
  new_ops->vuse_ops = finalize_ssa_vuses (&(old_ops->vuse_ops), stmt);
}


/* Start the process of building up the stmt operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VARRAY_ACTIVE_SIZE (build_defs) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_uses) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_vuses) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_v_may_defs) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_v_must_defs) == 0);
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VARRAY_PUSH_TREE_PTR (build_defs, def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VARRAY_PUSH_TREE_PTR (build_uses, use_p);
}


/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  var_ann_t ann = get_var_ann (var);

  /* Don't allow duplicate entries.  */
  if (ann->in_v_may_def_list)
    return;
  ann->in_v_may_def_list = 1;

  VARRAY_PUSH_TREE (build_v_may_defs, var);
}


/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
	return;
      ann->in_vuse_list = 1;
    }

  VARRAY_PUSH_TREE (build_vuses, var);
}


/* Add VAR to the list of virtual must definitions.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_must_defs); i++)
    if (var == VARRAY_TREE (build_v_must_defs, i))
      return;

  VARRAY_PUSH_TREE (build_v_must_defs, var);
}
/* Parse STMT looking for operands.  OLD_OPS is the original stmt operand
   cache for STMT, if it existed before.  When finished, the various build_*
   operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      /* First get operands from the RHS.  For the LHS, we use a V_MAY_DEF
	 if either only part of LHS is modified or if the RHS might throw;
	 otherwise, use V_MUST_DEF.

	 ??? If it might throw, we should represent somehow that it is killed
	 on the fallthrough path.  */
      {
	tree lhs = TREE_OPERAND (stmt, 0);
	int lhs_flags = opf_is_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);

	/* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
	   or not the entire LHS is modified; that depends on what's
	   inside the VIEW_CONVERT_EXPR.  */
	if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
	  lhs = TREE_OPERAND (lhs, 0);

	if (TREE_CODE (lhs) != ARRAY_REF && TREE_CODE (lhs) != ARRAY_RANGE_REF
	    && TREE_CODE (lhs) != BIT_FIELD_REF
	    && TREE_CODE (lhs) != REALPART_EXPR
	    && TREE_CODE (lhs) != IMAGPART_EXPR)
	  lhs_flags |= opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
      }
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

      /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
	 pointer (which may only happen for USE operands), we will fail in
	 append_use.  This default will handle statements like empty
	 statements, or CALL_EXPRs that may appear on the RHS of a statement
	 or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }
}
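
/* Illustration only (a sketch, not part of the original file): for a
   GIMPLE assignment whose LHS is fully overwritten, such as

     a = b + c;

   this parse produces DEF a, USE b, USE c (a V_MUST_DEF of 'a' instead,
   if 'a' is not a GIMPLE register).  A partial store such as

     a[i] = b;

   keeps opf_is_def but drops opf_kill_def, so 'a' gets a V_MAY_DEF: the
   rest of the array survives the assignment.  */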
/* Create an operands cache for STMT, returning it in NEW_OPS.  OLD_OPS are
   the original operands, and if ANN is non-null, appropriate stmt flags are
   set in the stmt's annotation.  If ANN is NULL, this is not considered a
   "real" stmt, and none of the operands will be entered into their
   respective immediate uses tables.  This is to allow stmts to be processed
   when they are not actually in the CFG.

   Note that some fields in old_ops may change to NULL, although none of the
   memory they originally pointed to will be destroyed.  It is appropriate
   to call free_stmt_operands() on the value returned in old_ops.

   The rationale for this: certain optimizations wish to examine the
   difference between new_ops and old_ops after processing.  If a set of
   operands doesn't change, new_ops will simply assume the pointer in
   old_ops, and the old_ops pointer will be set to NULL, indicating no
   memory needs to be cleared.  Usage might appear something like:

       old_ops_copy = old_ops = stmt_ann(stmt)->operands;
       build_ssa_operands (stmt, NULL, &old_ops, &new_ops);
       <* compare old_ops_copy and new_ops *>
       free_ssa_operands (old_ops);				*/

static void
build_ssa_operands (tree stmt, stmt_ann_t ann, stmt_operands_p old_ops,
		    stmt_operands_p new_ops)
{
  tree_ann_t saved_ann = stmt->common.ann;

  /* Replace stmt's annotation with the one passed in for the duration
     of the operand building process.  This allows "fake" stmts to be built
     and not be included in other data structures which can be built here.  */
  stmt->common.ann = (tree_ann_t) ann;

  parse_old_ops = old_ops;

  /* Initially assume that the statement has no volatile operands, nor
     makes aliased loads or stores.  */
  if (ann)
    {
      ann->has_volatile_ops = false;
      ann->makes_aliased_stores = false;
      ann->makes_aliased_loads = false;
    }

  start_ssa_stmt_operands ();

  parse_ssa_operands (stmt);

  parse_old_ops = NULL;

  if (ann)
    finalize_ssa_stmt_operands (stmt, old_ops, new_ops);
  else
    finalize_ssa_stmt_operands (NULL, old_ops, new_ops);
  stmt->common.ann = saved_ann;
}
/* Free any operands vectors in OPS.  */

static void
free_ssa_operands (stmt_operands_p ops)
{
  if (ops->def_ops)
    free_defs (&(ops->def_ops));
  if (ops->use_ops)
    free_uses (&(ops->use_ops));
  if (ops->vuse_ops)
    free_vuses (&(ops->vuse_ops));
  if (ops->v_may_def_ops)
    free_v_may_defs (&(ops->v_may_def_ops));
  if (ops->v_must_def_ops)
    free_v_must_defs (&(ops->v_must_def_ops));
}
/* Swap operands EXP0 and EXP1 in their containing expression.  */

static void
swap_tree_operands (tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (build_defs != NULL && op0 != op1 && parse_old_ops != NULL)
    {
      unsigned x, use0, use1;
      use_optype uses = parse_old_ops->use_ops;
      use0 = use1 = NUM_USES (uses);
      /* Find the 2 operands in the cache, if they are there.  */
      for (x = 0; x < NUM_USES (uses); x++)
	if (USE_OP_PTR (uses, x)->use == exp0)
	  {
	    use0 = x;
	    break;
	  }
      for (x = 0; x < NUM_USES (uses); x++)
	if (USE_OP_PTR (uses, x)->use == exp1)
	  {
	    use1 = x;
	    break;
	  }
      /* If both uses don't have operand entries, there isn't much we can
	 do at this point.  Presumably we don't need to worry about it.  */
      if (use0 != NUM_USES (uses) && use1 != NUM_USES (uses))
	{
	  tree *tmp = USE_OP_PTR (uses, use1)->use;
	  gcc_assert (use0 != use1);

	  USE_OP_PTR (uses, use1)->use = USE_OP_PTR (uses, use0)->use;
	  USE_OP_PTR (uses, use0)->use = tmp;
	}
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
/* Get the operands of statement STMT.  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann;
  stmt_operands_t old_operands;

  /* Don't do anything if we are called before SSA is initialized.  */
  if (build_defs == NULL)
    return;

  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  ann = get_stmt_ann (stmt);

  gcc_assert (ann->modified);

  timevar_push (TV_TREE_OPS);

  old_operands = ann->operands;
  memset (&(ann->operands), 0, sizeof (stmt_operands_t));

  build_ssa_operands (stmt, ann, &old_operands, &(ann->operands));
  free_ssa_operands (&old_operands);

  /* Clear the modified bit for STMT.  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
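
/* Illustration only (a sketch, not part of the original file): passes
   normally do not call update_stmt_operands () directly.  The usual
   pattern, assuming the update_stmt () wrapper from tree-flow-inline.h,
   is:

     TREE_OPERAND (rhs, 1) = new_val;   <- edit the stmt tree in place
     update_stmt (stmt);                <- marks modified, rebuilds cache

   The assert on ann->modified above enforces exactly this discipline: the
   cache is rebuilt only for stmts that were flagged as modified.  */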
/* Recursively scan the expression pointed to by EXPR_P in statement STMT.
   FLAGS is one of the OPF_* constants modifying how to interpret the
   operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* We could have the address of a component, array member,
	 etc which has interesting variable references.  */
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the stmt takes its address will
	 be of interest to some passes (e.g. alias resolution).  */
      add_stmt_operand (expr_p, s_ann, 0);

      /* If the address is invariant, there may be no interesting variable
	 references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* There should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find here
	 are ARRAY_REF indices which will always be real operands (GIMPLE
	 does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case CONST_DECL:
      {
	subvar_t svars;

	/* Add the subvars for a variable if it has subvars, to DEFS or
	   USES.  Otherwise, add the variable itself.  Whether it goes to
	   USES or DEFS depends on the operand flags.  */
	if (var_can_have_subvars (expr)
	    && (svars = get_subvars_for_var (expr)))
	  {
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      add_stmt_operand (&sv->var, s_ann, flags);
	  }
	else
	  add_stmt_operand (expr_p, s_ann, flags);

	return;
      }

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
	 representing the array.  The virtual variable for an ARRAY_REF
	 is the VAR_DECL for the array.  */

      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
	 according to the value of IS_DEF.  Recurse if the LHS of the
	 ARRAY_REF node is not a regular variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, s_ann, flags);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
      return;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree ref;
	HOST_WIDE_INT offset, size;
	/* This component ref becomes an access to all of the subvariables
	   it can touch, if we can determine that, but *NOT* the real one.
	   If we can't determine which fields we could touch, the recursion
	   will eventually get to a variable and add *all* of its subvars,
	   or whatever is the minimum correct subset.  */

	ref = okay_component_ref_for_subvars (expr, &offset, &size);
	if (ref)
	  {
	    subvar_t svars = get_subvars_for_var (ref);
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      {
		bool exact;
		if (overlap_subvar (offset, size, sv, &exact))
		  {
		    if (exact)
		      flags &= ~opf_kill_def;
		    add_stmt_operand (&sv->var, s_ann, flags);
		  }
	      }
	  }
	else
	  get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			     flags & ~opf_kill_def);

	if (code == COMPONENT_REF)
	  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case MODIFY_EXPR:
      {
	int subflags;
	tree op;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

	op = TREE_OPERAND (expr, 0);
	if (TREE_CODE (op) == WITH_SIZE_EXPR)
	  op = TREE_OPERAND (op, 0);
	if (TREE_CODE (op) == ARRAY_REF
	    || TREE_CODE (op) == ARRAY_RANGE_REF
	    || TREE_CODE (op) == REALPART_EXPR
	    || TREE_CODE (op) == IMAGPART_EXPR)
	  subflags = opf_is_def;
	else
	  subflags = opf_is_def | opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
	return;
      }

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */

	tree t;
	for (t = TREE_OPERAND (expr, 0); t; t = TREE_CHAIN (t))
	  get_expr_operands (stmt, &TREE_VALUE (t), opf_none);

	return;
      }

    case TRUTH_NOT_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	tree op0 = TREE_OPERAND (expr, 0);
	tree op1 = TREE_OPERAND (expr, 1);

	/* If it would be profitable to swap the operands, then do so to
	   canonicalize the statement, enabling better optimization.

	   By placing canonicalization of such expressions here we
	   transparently keep statements in canonical form, even
	   when the statement is modified.  */
	if (tree_swap_operands_p (op0, op1, false))
	  {
	    /* For relationals we need to swap the operands
	       and change the code.  */
	    if (code == LT_EXPR
		|| code == GT_EXPR
		|| code == LE_EXPR
		|| code == GE_EXPR)
	      {
		TREE_SET_CODE (expr, swap_tree_comparison (code));
		swap_tree_operands (&TREE_OPERAND (expr, 0),
				    &TREE_OPERAND (expr, 1));
	      }
	    /* For a commutative operator we can just swap the operands.  */
	    else if (commutative_tree_code (code))
	      {
		swap_tree_operands (&TREE_OPERAND (expr, 0),
				    &TREE_OPERAND (expr, 1));
	      }
	  }

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
	goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
	goto do_binary;
      if (class == tcc_constant || class == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  internal_error ("internal error");
#endif
  gcc_unreachable ();
}
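
/* Illustration only (a sketch, not part of the original file): the
   do_binary canonicalization above is what keeps constants in the second
   operand slot as statements are rebuilt.  For example, rewriting a
   COND_EXPR condition from

     if (5 > a)    to    if (a < 5)

   swaps the operands and flips the comparison code via
   swap_tree_comparison (), while swap_tree_operands () preserves each
   operand's position in its immediate-use list.  */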
/* Scan operands in the ASM_EXPR stmt STMT.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
			       &allows_mem, &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }


  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, s_ann, opf_is_def);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, s_ann, opf_is_def);
	    }

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
	  {
	    tree var = referenced_var (i);

	    /* Subvars are explicitly represented in this list, so
	       we don't need the original to be added to the clobber
	       ops, but the original *will* be in this list because
	       we keep the addressability of the original
	       variable up-to-date so we don't screw up the rest of
	       the backend.  */
	    if (var_can_have_subvars (var)
		&& get_subvars_for_var (var) != NULL)
	      continue;

	    add_stmt_operand (&var, s_ann, opf_is_def);
	  }

	break;
      }
}
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its type memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);
	  if (v_ann->type_mem_tag)
	    add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
	}
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }

  /* Everything else *should* have been folded elsewhere, but users
     are smarter than we in finding ways to write invalid code.  We
     cannot just assert here.  If we were absolutely certain that we
     do handle all valid cases, then we could just do nothing here.
     That seems optimistic, so attempt to do something logical... */
  else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
	   && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
    {
      /* Make sure we know the object is addressable.  */
      pptr = &TREE_OPERAND (ptr, 0);
      add_stmt_operand (pptr, s_ann, 0);

      /* Mark the object itself with a VUSE.  */
      pptr = &TREE_OPERAND (*pptr, 0);
      get_expr_operands (stmt, pptr, flags);
      return;
    }

  /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
  else
    gcc_unreachable ();

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none);
}
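
/* Illustration only (a sketch, not part of the original file): for a
   dereference such as

     *p_1 = x_2;

   the store goes to whatever memory tag the aliaser gave p_1.  If p_1
   has flow-sensitive points-to info, its name tag (say NMT.3) receives
   the V_MAY_DEF; otherwise the type tag of 'p' (say TMT.5) does.  NMT.3
   and TMT.5 are made-up tag names for the example.  */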
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or V_USE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
  if (aliases_computed_p
      && !bitmap_empty_p (call_clobbered_vars)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt);
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt);
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}
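
/* Illustration only (a sketch, not part of the original file): for a call
   such as

     foo (x_1);

   where global 'g' is call-clobbered, this adds a V_MAY_DEF of 'g' (or of
   .GLOBAL_VAR when the aliaser grouped the clobbered set), so that

     # g_4 = V_MAY_DEF <g_3>
     foo (x_1);

   correctly blocks code motion of loads and stores of 'g' across the
   call.  A pure call would instead add only a VUSE of 'g'.  */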
/* Add *VAR_P to the appropriate operand array.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  bool is_real_op;
  tree var, sym;
  var_ann_t v_ann;

  var = *var_p;
  STRIP_NOPS (var);

  /* If the operand is an ADDR_EXPR, add its operand to the list of
     variables that have had their address taken in this statement.  */
  if (TREE_CODE (var) == ADDR_EXPR)
    {
      note_addressable (TREE_OPERAND (var, 0), s_ann);
      return;
    }

  /* If the original variable is not a scalar, it will be added to the list
     of virtual operands.  In that case, use its base symbol as the virtual
     variable representing it.  */
  is_real_op = is_gimple_reg (var);
  if (!is_real_op && !DECL_P (var))
    var = get_virtual_var (var);

  /* If VAR is not a variable that we care to optimize, do nothing.  */
  if (var == NULL_TREE || !SSA_VAR_P (var))
    return;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a V_MAY_DEF, change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we check
     that this only happens on non-killing stores, and not on writes to
     GIMPLE registers.

     FIXME: The C++ FE is emitting assignments in the IL stream for
     read-only globals.  This is wrong, but for the time being disable
     this transformation on V_MUST_DEF operands (otherwise, we
     mis-optimize SPEC2000's eon).  */
  if ((flags & opf_is_def)
      && !(flags & opf_kill_def)
      && unmodifiable_var_p (var))
    {
      gcc_assert (!is_real_op);
      flags &= ~opf_is_def;
    }

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    {
      varray_type aliases;

      /* The variable is not a GIMPLE register.  Add it (or its aliases) to
	 virtual operands, unless the caller has specifically requested
	 not to add virtual operands (used when adding operands inside an
	 ADDR_EXPR expression).  */
      if (flags & opf_no_vops)
	return;

      aliases = v_ann->may_aliases;

      if (aliases == NULL)
	{
	  /* The variable is not aliased or it is an alias tag.  */
	  if (flags & opf_is_def)
	    {
	      if (flags & opf_kill_def)
		{
		  /* Only regular variables or struct fields may get a
		     V_MUST_DEF operand.  */
		  gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG
			      || v_ann->mem_tag_kind == STRUCT_FIELD);
		  /* V_MUST_DEF for non-aliased, non-GIMPLE register
		     variable definitions.  */
		  append_v_must_def (var);
		}
	      else
		{
		  /* Add a V_MAY_DEF for call-clobbered variables and
		     memory tags.  */
		  append_v_may_def (var);
		}
	    }
	  else
	    {
	      append_vuse (var);
	      if (s_ann && v_ann->is_alias_tag)
		s_ann->makes_aliased_loads = 1;
	    }
	}
      else
	{
	  size_t i;

	  /* The variable is aliased.  Add its aliases to the virtual
	     operands.  */
	  gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);

	  if (flags & opf_is_def)
	    {
	      bool added_may_defs_p = false;

	      /* If the variable is also an alias tag, add a virtual
		 operand for it, otherwise we will miss representing
		 references to the members of the variable's alias set.
		 This fixes the bug in gcc.c-torture/execute/20020503-1.c.  */
	      if (v_ann->is_alias_tag)
		{
		  added_may_defs_p = true;
		  append_v_may_def (var);
		}

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		{
		  /* While VAR may be modifiable, some of its aliases
		     may not be.  If that's the case, we don't really
		     need to add a V_MAY_DEF for them.  */
		  tree alias = VARRAY_TREE (aliases, i);

		  if (unmodifiable_var_p (alias))
		    append_vuse (alias);
		  else
		    {
		      append_v_may_def (alias);
		      added_may_defs_p = true;
		    }
		}

	      if (s_ann && added_may_defs_p)
		s_ann->makes_aliased_stores = 1;
	    }
	  else
	    {
	      /* Similarly, append a virtual use for VAR itself, when
		 it is an alias tag.  */
	      if (v_ann->is_alias_tag)
		append_vuse (var);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_vuse (VARRAY_TREE (aliases, i));

	      if (s_ann)
		s_ann->makes_aliased_loads = 1;
	    }
	}
    }
}
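
/* Illustration only (a sketch, not part of the original file): suppose
   'a' has may-aliases {b, c} and is itself an alias tag.  A store through
   'a' then expands into

     # a_5 = V_MAY_DEF <a_4>
     # b_7 = V_MAY_DEF <b_6>
     # c_9 = V_MAY_DEF <c_8>
     a = ...;

   while a load yields the matching VUSEs, and makes_aliased_stores /
   makes_aliased_loads are set on the stmt annotation.  The SSA version
   numbers are made up for the example.  */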
/* Record that VAR had its address taken in the statement with annotations
   S_ANN.  */

static void
note_addressable (tree var, stmt_ann_t s_ann)
{
  tree ref;
  subvar_t svars;
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;

  if (!s_ann)
    return;

  /* If this is a COMPONENT_REF, and we know exactly what it touches, we
     only take the address of the subvariables it will touch.  Otherwise,
     we take the address of all the subvariables, plus the real ones.  */

  if (var && TREE_CODE (var) == COMPONENT_REF
      && (ref = okay_component_ref_for_subvars (var, &offset, &size)))
    {
      subvar_t sv;
      svars = get_subvars_for_var (ref);

      if (s_ann->addresses_taken == NULL)
	s_ann->addresses_taken = BITMAP_GGC_ALLOC ();

      for (sv = svars; sv; sv = sv->next)
	{
	  if (overlap_subvar (offset, size, sv, NULL))
	    bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid);
	}
      return;
    }

  var = get_base_address (var);
  if (var && SSA_VAR_P (var))
    {
      if (s_ann->addresses_taken == NULL)
	s_ann->addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
	  && (svars = get_subvars_for_var (var)))
	{
	  subvar_t sv;
	  for (sv = svars; sv; sv = sv->next)
	    bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid);
	}
      else
	bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
    }
}
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt)
{
  unsigned i;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;

  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_is_def);
      return;
    }

  /* If cache is valid, copy the elements into the build vectors.  */
  if (ssa_call_clobbered_cache_valid)
    {
      for (i = 0; i < VARRAY_ACTIVE_SIZE (clobbered_vuses); i++)
	{
	  t = VARRAY_TREE (clobbered_vuses, i);
	  gcc_assert (TREE_CODE (t) != SSA_NAME);
	  var_ann (t)->in_vuse_list = 1;
	  VARRAY_PUSH_TREE (build_vuses, t);
	}
      for (i = 0; i < VARRAY_ACTIVE_SIZE (clobbered_v_may_defs); i++)
	{
	  t = VARRAY_TREE (clobbered_v_may_defs, i);
	  gcc_assert (TREE_CODE (t) != SSA_NAME);
	  var_ann (t)->in_v_may_def_list = 1;
	  VARRAY_PUSH_TREE (build_v_may_defs, t);
	}
      if (s_ann)
	{
	  s_ann->makes_aliased_loads = clobbered_aliased_loads;
	  s_ann->makes_aliased_stores = clobbered_aliased_stores;
	}
      return;
    }

  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a V_MAY_DEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
    {
      tree var = referenced_var (i);
      if (unmodifiable_var_p (var))
	add_stmt_operand (&var, &empty_ann, opf_none);
      else
	add_stmt_operand (&var, &empty_ann, opf_is_def);
    }

  clobbered_aliased_loads = empty_ann.makes_aliased_loads;
  clobbered_aliased_stores = empty_ann.makes_aliased_stores;

  /* Set the flags for a stmt's annotation.  */
  if (s_ann)
    {
      s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
      s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
    }

  /* Prepare empty cache vectors.  */
  if (clobbered_v_may_defs)
    {
      VARRAY_POP_ALL (clobbered_vuses);
      VARRAY_POP_ALL (clobbered_v_may_defs);
    }
  else
    {
      VARRAY_TREE_INIT (clobbered_v_may_defs, 10, "clobbered_v_may_defs");
      VARRAY_TREE_INIT (clobbered_vuses, 10, "clobbered_vuses");
    }

  /* Now fill the clobbered cache with the values that have been found.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
    VARRAY_PUSH_TREE (clobbered_vuses, VARRAY_TREE (build_vuses, i));
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
    VARRAY_PUSH_TREE (clobbered_v_may_defs, VARRAY_TREE (build_v_may_defs, i));

  ssa_call_clobbered_cache_valid = true;
}
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in
   the function.  */

static void
add_call_read_ops (tree stmt)
{
  unsigned i;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;

  /* If the function is not pure, it may reference memory.  Add a VUSE
     for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_none);
      return;
    }

  /* If the cache is valid, copy the elements into the build vector.  */
  if (ssa_ro_call_cache_valid)
    {
      for (i = 0; i < VARRAY_ACTIVE_SIZE (ro_call_vuses); i++)
	{
	  t = VARRAY_TREE (ro_call_vuses, i);
	  gcc_assert (TREE_CODE (t) != SSA_NAME);
	  var_ann (t)->in_vuse_list = 1;
	  VARRAY_PUSH_TREE (build_vuses, t);
	}
      if (s_ann)
	s_ann->makes_aliased_loads = ro_call_aliased_loads;
      return;
    }

  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
    {
      tree var = referenced_var (i);
      add_stmt_operand (&var, &empty_ann, opf_none);
    }

  ro_call_aliased_loads = empty_ann.makes_aliased_loads;
  if (s_ann)
    s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;

  /* Prepare an empty cache vector.  */
  if (ro_call_vuses)
    VARRAY_POP_ALL (ro_call_vuses);
  else
    VARRAY_TREE_INIT (ro_call_vuses, 10, "ro_call_vuses");

  /* Now fill the read-only call cache with the values that have been
     found.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
    VARRAY_PUSH_TREE (ro_call_vuses, VARRAY_TREE (build_vuses, i));

  ssa_ro_call_cache_valid = true;
}
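
/* Note for readers: the choice between add_call_clobber_ops and
   add_call_read_ops is made by the caller, get_call_expr_operands,
   based on the call's ECF_* flags.  A call that is neither pure,
   const, nor noreturn receives clobber operands; a pure (but not
   const) call only receives read operands.  */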
/* Copies virtual operands from SRC to DST.  */

void
copy_virtual_operands (tree dst, tree src)
{
  unsigned i;
  vuse_optype vuses = STMT_VUSE_OPS (src);
  v_may_def_optype v_may_defs = STMT_V_MAY_DEF_OPS (src);
  v_must_def_optype v_must_defs = STMT_V_MUST_DEF_OPS (src);
  vuse_optype *vuses_new = &stmt_ann (dst)->operands.vuse_ops;
  v_may_def_optype *v_may_defs_new = &stmt_ann (dst)->operands.v_may_def_ops;
  v_must_def_optype *v_must_defs_new
    = &stmt_ann (dst)->operands.v_must_def_ops;

  if (vuses)
    {
      *vuses_new = allocate_vuse_optype (NUM_VUSES (vuses));
      for (i = 0; i < NUM_VUSES (vuses); i++)
	initialize_vuse_operand (*vuses_new, i, VUSE_OP (vuses, i), dst, NULL);
    }

  if (v_may_defs)
    {
      *v_may_defs_new = allocate_v_may_def_optype (NUM_V_MAY_DEFS (v_may_defs));
      for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
	initialize_v_may_def_operand (*v_may_defs_new, i,
				      V_MAY_DEF_RESULT (v_may_defs, i),
				      V_MAY_DEF_OP (v_may_defs, i), dst,
				      NULL);
    }

  if (v_must_defs)
    {
      *v_must_defs_new
	= allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs));
      for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
	initialize_v_must_def_operand (*v_must_defs_new, i,
				       V_MUST_DEF_RESULT (v_must_defs, i),
				       V_MUST_DEF_KILL (v_must_defs, i), dst,
				       NULL);
    }
}
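
/* Illustrative sketch (not part of the original file): after a pass has
   built a replacement statement NEW_STMT for OLD_STMT, it can make the
   replacement inherit OLD_STMT's virtual operands as below.  The helper
   name is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_inherit_vops (tree new_stmt, tree old_stmt)
{
  /* copy_virtual_operands stores into DST's annotation, so NEW_STMT
     must have one; get_stmt_ann creates it if necessary.  */
  get_stmt_ann (new_stmt);
  copy_virtual_operands (new_stmt, old_stmt);
}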
/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this
   can be used to eliminate redundant loads.  OLD_OPS are the operands
   from the store stmt, and NEW_STMT is the new load which represents a
   load of the values stored.  */

void
create_ssa_artficial_load_stmt (stmt_operands_p old_ops, tree new_stmt)
{
  stmt_ann_t ann;
  tree op;
  stmt_operands_t tmp;
  unsigned j;

  memset (&tmp, 0, sizeof (stmt_operands_t));
  ann = get_stmt_ann (new_stmt);

  /* Free the operands, in case NEW_STMT was an existing stmt.  */
  free_ssa_operands (&(ann->operands));

  build_ssa_operands (new_stmt, NULL, &tmp, &(ann->operands));
  free_vuses (&(ann->operands.vuse_ops));
  free_v_may_defs (&(ann->operands.v_may_def_ops));
  free_v_must_defs (&(ann->operands.v_must_def_ops));

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF result on the new
     statement.  */
  for (j = 0; j < NUM_V_MAY_DEFS (old_ops->v_may_def_ops); j++)
    {
      op = V_MAY_DEF_RESULT (old_ops->v_may_def_ops, j);
      append_vuse (op);
    }

  for (j = 0; j < NUM_V_MUST_DEFS (old_ops->v_must_def_ops); j++)
    {
      op = V_MUST_DEF_RESULT (old_ops->v_must_def_ops, j);
      append_vuse (op);
    }

  /* Now set the VUSEs for this new stmt.  */
  ann->operands.vuse_ops = finalize_ssa_vuses (&(tmp.vuse_ops), NULL);
}
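
/* Illustrative sketch, modeled on the caller in tree-ssa-dom.c (the
   helper below is hypothetical): given a store STMT of the form
   "lhs = rhs", DOM builds a statement with the two operands exchanged,
   "rhs = lhs", which looks like a load of the stored location, and
   lets create_ssa_artficial_load_stmt give it VUSEs matching the
   store's VDEFs.  STMT is assumed to already have its operands built.  */

static tree ATTRIBUTE_UNUSED
example_build_load_for_store (tree stmt)
{
  stmt_ann_t ann = stmt_ann (stmt);
  tree lhs = TREE_OPERAND (stmt, 0);
  tree rhs = TREE_OPERAND (stmt, 1);

  /* Exchange LHS and RHS so the new statement reads the stored
     location instead of writing it.  */
  tree new_stmt = build2 (MODIFY_EXPR, TREE_TYPE (stmt), rhs, lhs);

  create_ssa_artficial_load_stmt (&(ann->operands), new_stmt);
  return new_stmt;
}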
/* Scan the immediate-use list for VAR, making sure it is linked
   properly.  Return TRUE if there is a problem.  */

bool
verify_imm_links (FILE *f, tree var)
{
  ssa_imm_use_t *ptr, *prev;
  ssa_imm_use_t *list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;
      /* Avoid infinite loops.  */
      if (count++ > 30000)
	goto error;
    }

  /* Verify the list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
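
/* Illustrative sketch (hypothetical helper, not part of the original
   file): a whole-function consistency check can invoke the verifier
   for every SSA name, mirroring the loop in dump_immediate_uses
   below.  Returns TRUE if any list is broken.  */

static bool ATTRIBUTE_UNUSED
example_verify_all_imm_links (FILE *f)
{
  unsigned int x;
  bool err = false;

  for (x = 1; x < num_ssa_names; x++)
    {
      tree name = ssa_name (x);
      if (name)
	err |= verify_imm_links (f, name);
    }
  return err;
}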
/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
  fprintf (file, "\n");
}
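
/* Illustrative sketch (hypothetical helper): client code walks the
   immediate uses the same way, with FOR_EACH_IMM_USE_FAST.  For
   instance, checking whether every use of NAME occurs in basic
   block BB.  */

static bool ATTRIBUTE_UNUSED
example_all_uses_in_bb (tree name, basic_block bb)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    if (bb_for_stmt (USE_STMT (use_p)) != bb)
      return false;
  return true;
}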
/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
	continue;
      dump_immediate_uses_for (file, var);
    }
}
/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}

/* Dump the def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

#include "gt-tree-ssa-operands.h"