/* SSA operands management for trees.
   Copyright (C) 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "errors.h"
30 #include "tree-flow.h"
31 #include "tree-inline.h"
32 #include "tree-pass.h"
33 #include "ggc.h"
34 #include "timevar.h"
36 #include "langhooks.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   get_stmt_operands() is the primary entry point.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
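
/* As an informal illustration of the above: given

	a = b + c;

   where 'a', 'b' and 'c' are scalar GIMPLE registers, the cache holds one
   real DEF (a) and two real USEs (b, c), each a pointer into the stmt tree.
   By contrast, a store through a pointer such as

	*p = 1;

   where 'p' may point to 'x' produces a virtual operand

	# x_2 = V_MAY_DEF <x_1>

   for 'x', even though 'x' never appears in the stmt tree itself.  */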
/* Flags to describe operand properties in get_stmt_operands and helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def 	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def 	(1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 2)
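
/* Informally, combinations of these flags mirror the access being scanned:
   a killing assignment such as "x = 5" scans its LHS with
   opf_is_def | opf_kill_def, a partial store such as "a.f = 5" uses
   opf_is_def alone, and subtrees of an ADDR_EXPR are scanned with
   opf_no_vops set so that only real operands (e.g. array indices) are
   collected.  */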
/* Array for building all the def operands.  */
static GTY (()) varray_type build_defs;

/* Array for building all the use operands.  */
static GTY (()) varray_type build_uses;

/* Array for building all the v_may_def operands.  */
static GTY (()) varray_type build_v_may_defs;

/* Array for building all the vuse operands.  */
static GTY (()) varray_type build_vuses;

/* Array for building all the v_must_def operands.  */
static GTY (()) varray_type build_v_must_defs;


#ifdef ENABLE_CHECKING
/* Used to make sure operand construction is working on the proper stmt.  */
tree check_build_stmt;
#endif

def_operand_p NULL_DEF_OPERAND_P = { NULL };
use_operand_p NULL_USE_OPERAND_P = { NULL };
static void note_addressable (tree, stmt_ann_t);
static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
static void append_v_may_def (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree);
static void add_call_read_ops (tree);
static void add_stmt_operand (tree *, tree, int);
/* Return a vector of contiguous memory for NUM def operands.  */

static inline def_optype
allocate_def_optype (unsigned num)
{
  def_optype def_ops;
  unsigned size;
  size = sizeof (struct def_optype_d) + sizeof (tree *) * (num - 1);
  def_ops = ggc_alloc (size);
  def_ops->num_defs = num;
  return def_ops;
}


/* Return a vector of contiguous memory for NUM use operands.  */

static inline use_optype
allocate_use_optype (unsigned num)
{
  use_optype use_ops;
  unsigned size;
  size = sizeof (struct use_optype_d) + sizeof (tree *) * (num - 1);
  use_ops = ggc_alloc (size);
  use_ops->num_uses = num;
  return use_ops;
}


/* Return a vector of contiguous memory for NUM v_may_def operands.  */

static inline v_may_def_optype
allocate_v_may_def_optype (unsigned num)
{
  v_may_def_optype v_may_def_ops;
  unsigned size;
  size = sizeof (struct v_may_def_optype_d)
	 + sizeof (v_def_use_operand_type_t) * (num - 1);
  v_may_def_ops = ggc_alloc (size);
  v_may_def_ops->num_v_may_defs = num;
  return v_may_def_ops;
}


/* Return a vector of contiguous memory for NUM vuse operands.  */

static inline vuse_optype
allocate_vuse_optype (unsigned num)
{
  vuse_optype vuse_ops;
  unsigned size;
  size = sizeof (struct vuse_optype_d) + sizeof (tree) * (num - 1);
  vuse_ops = ggc_alloc (size);
  vuse_ops->num_vuses = num;
  return vuse_ops;
}


/* Return a vector of contiguous memory for NUM v_must_def operands.  */

static inline v_must_def_optype
allocate_v_must_def_optype (unsigned num)
{
  v_must_def_optype v_must_def_ops;
  unsigned size;
  size = sizeof (struct v_must_def_optype_d)
	 + sizeof (v_def_use_operand_type_t) * (num - 1);
  v_must_def_ops = ggc_alloc (size);
  v_must_def_ops->num_v_must_defs = num;
  return v_must_def_ops;
}
/* Free memory for USES.  */

static inline void
free_uses (use_optype *uses)
{
  if (*uses)
    {
      ggc_free (*uses);
      *uses = NULL;
    }
}


/* Free memory for DEFS.  */

static inline void
free_defs (def_optype *defs)
{
  if (*defs)
    {
      ggc_free (*defs);
      *defs = NULL;
    }
}


/* Free memory for VUSES.  */

static inline void
free_vuses (vuse_optype *vuses)
{
  if (*vuses)
    {
      ggc_free (*vuses);
      *vuses = NULL;
    }
}


/* Free memory for V_MAY_DEFS.  */

static inline void
free_v_may_defs (v_may_def_optype *v_may_defs)
{
  if (*v_may_defs)
    {
      ggc_free (*v_may_defs);
      *v_may_defs = NULL;
    }
}


/* Free memory for V_MUST_DEFS.  */

static inline void
free_v_must_defs (v_must_def_optype *v_must_defs)
{
  if (*v_must_defs)
    {
      ggc_free (*v_must_defs);
      *v_must_defs = NULL;
    }
}
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  VARRAY_TREE_PTR_INIT (build_defs, 5, "build defs");
  VARRAY_TREE_PTR_INIT (build_uses, 10, "build uses");
  VARRAY_TREE_INIT (build_v_may_defs, 10, "build v_may_defs");
  VARRAY_TREE_INIT (build_vuses, 10, "build vuses");
  VARRAY_TREE_INIT (build_v_must_defs, 10, "build v_must_defs");
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  ggc_free (build_defs);
  ggc_free (build_uses);
  ggc_free (build_v_may_defs);
  ggc_free (build_vuses);
  ggc_free (build_v_must_defs);
  build_defs = NULL;
  build_uses = NULL;
  build_v_may_defs = NULL;
  build_vuses = NULL;
  build_v_must_defs = NULL;
}
/* All the finalize_ssa_* routines do the work required to turn the build_
   VARRAY into an operand_vector of the appropriate type.  The original
   vector, if any, is passed in for comparison and virtual SSA_NAME reuse.
   If the old vector is reused, the pointer passed in is set to NULL so that
   the memory is not freed when the old operands are freed.  */
/* Return a new def operand vector for STMT, comparing to OLD_OPS_P.  */

static def_optype
finalize_ssa_defs (def_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x;
  def_optype def_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_defs);
  if (num == 0)
    return NULL;

  /* There should only be a single real definition per assignment.  */
  gcc_assert (TREE_CODE (stmt) != MODIFY_EXPR || num <= 1);

  old_ops = *old_ops_p;

  /* Compare old vector and new array.  */
  build_diff = true;
  if (old_ops && old_ops->num_defs == num)
    {
      build_diff = false;
      for (x = 0; x < num; x++)
	if (old_ops->defs[x].def != VARRAY_TREE_PTR (build_defs, x))
	  {
	    build_diff = true;
	    break;
	  }
    }

  if (!build_diff)
    {
      def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      def_ops = allocate_def_optype (num);
      for (x = 0; x < num ; x++)
	def_ops->defs[x].def = VARRAY_TREE_PTR (build_defs, x);
    }

  VARRAY_POP_ALL (build_defs);

  return def_ops;
}
/* Return a new use operand vector for STMT, comparing to OLD_OPS_P.  */

static use_optype
finalize_ssa_uses (use_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x;
  use_optype use_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_uses);
  if (num == 0)
    return NULL;

#ifdef ENABLE_CHECKING
  {
    unsigned x;
    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to get_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*(VARRAY_TREE_PTR (build_uses, x)) != stmt);
  }
#endif

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  build_diff = true;
  if (old_ops && old_ops->num_uses == num)
    {
      build_diff = false;
      for (x = 0; x < num; x++)
	if (old_ops->uses[x].use != VARRAY_TREE_PTR (build_uses, x))
	  {
	    build_diff = true;
	    break;
	  }
    }

  if (!build_diff)
    {
      use_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      use_ops = allocate_use_optype (num);
      for (x = 0; x < num ; x++)
	use_ops->uses[x].use = VARRAY_TREE_PTR (build_uses, x);
    }

  VARRAY_POP_ALL (build_uses);

  return use_ops;
}
/* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P.  */

static v_may_def_optype
finalize_ssa_v_may_defs (v_may_def_optype *old_ops_p)
{
  unsigned num, x, i, old_num;
  v_may_def_optype v_may_def_ops, old_ops;
  tree result, var;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
  if (num == 0)
    return NULL;

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  build_diff = true;
  if (old_ops && old_ops->num_v_may_defs == num)
    {
      old_num = num;
      build_diff = false;
      for (x = 0; x < num; x++)
	{
	  var = old_ops->v_may_defs[x].def;
	  if (TREE_CODE (var) == SSA_NAME)
	    var = SSA_NAME_VAR (var);
	  if (var != VARRAY_TREE (build_v_may_defs, x))
	    {
	      build_diff = true;
	      break;
	    }
	}
    }
  else
    old_num = (old_ops ? old_ops->num_v_may_defs : 0);

  if (!build_diff)
    {
      v_may_def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      v_may_def_ops = allocate_v_may_def_optype (num);
      for (x = 0; x < num; x++)
	{
	  var = VARRAY_TREE (build_v_may_defs, x);
	  /* Look for VAR in the old operands vector.  */
	  for (i = 0; i < old_num; i++)
	    {
	      result = old_ops->v_may_defs[i].def;
	      if (TREE_CODE (result) == SSA_NAME)
		result = SSA_NAME_VAR (result);
	      if (result == var)
		{
		  v_may_def_ops->v_may_defs[x] = old_ops->v_may_defs[i];
		  break;
		}
	    }
	  if (i == old_num)
	    {
	      v_may_def_ops->v_may_defs[x].def = var;
	      v_may_def_ops->v_may_defs[x].use = var;
	    }
	}
    }

  /* Empty the V_MAY_DEF build vector after VUSES have been processed.  */

  return v_may_def_ops;
}
/* Return a new vuse operand vector, comparing to OLD_OPS_P.  */

static vuse_optype
finalize_ssa_vuses (vuse_optype *old_ops_p)
{
  unsigned num, x, i, num_v_may_defs, old_num;
  vuse_optype vuse_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  if (num == 0)
    {
      VARRAY_POP_ALL (build_v_may_defs);
      return NULL;
    }

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  num_v_may_defs = VARRAY_ACTIVE_SIZE (build_v_may_defs);

  if (num_v_may_defs > 0)
    {
      size_t i, j;
      tree vuse;
      for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
	{
	  vuse = VARRAY_TREE (build_vuses, i);
	  for (j = 0; j < num_v_may_defs; j++)
	    {
	      if (vuse == VARRAY_TREE (build_v_may_defs, j))
		break;
	    }

	  /* If we found a useless VUSE operand, remove it from the
	     operand array by replacing it with the last active element
	     in the operand array (unless the useless VUSE was the
	     last operand, in which case we simply remove it.)  */
	  if (j != num_v_may_defs)
	    {
	      if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
		{
		  VARRAY_TREE (build_vuses, i)
		    = VARRAY_TREE (build_vuses,
				   VARRAY_ACTIVE_SIZE (build_vuses) - 1);
		}
	      VARRAY_POP (build_vuses);

	      /* We want to rescan the element at this index, unless
		 this was the last element, in which case the loop
		 terminates.  */
	      i--;
	    }
	}
    }

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  /* We could have reduced the size to zero now, however.  */
  if (num == 0)
    {
      VARRAY_POP_ALL (build_v_may_defs);
      return NULL;
    }

  old_ops = *old_ops_p;

  /* Determine whether vuses is the same as the old vector.  */
  build_diff = true;
  if (old_ops && old_ops->num_vuses == num)
    {
      old_num = num;
      build_diff = false;
      for (x = 0; x < num ; x++)
	{
	  tree v;
	  v = old_ops->vuses[x];
	  if (TREE_CODE (v) == SSA_NAME)
	    v = SSA_NAME_VAR (v);
	  if (v != VARRAY_TREE (build_vuses, x))
	    {
	      build_diff = true;
	      break;
	    }
	}
    }
  else
    old_num = (old_ops ? old_ops->num_vuses : 0);

  if (!build_diff)
    {
      vuse_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      vuse_ops = allocate_vuse_optype (num);
      for (x = 0; x < num; x++)
	{
	  tree result, var = VARRAY_TREE (build_vuses, x);
	  /* Look for VAR in the old vector, and use that SSA_NAME.  */
	  for (i = 0; i < old_num; i++)
	    {
	      result = old_ops->vuses[i];
	      if (TREE_CODE (result) == SSA_NAME)
		result = SSA_NAME_VAR (result);
	      if (result == var)
		{
		  vuse_ops->vuses[x] = old_ops->vuses[i];
		  break;
		}
	    }
	  if (i == old_num)
	    vuse_ops->vuses[x] = var;
	}
    }

  /* The v_may_def build vector wasn't freed because we needed it here.
     Free it now with the vuses build vector.  */
  VARRAY_POP_ALL (build_vuses);
  VARRAY_POP_ALL (build_v_may_defs);

  return vuse_ops;
}
/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P.  */

static v_must_def_optype
finalize_ssa_v_must_defs (v_must_def_optype *old_ops_p,
			  tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x, i, old_num = 0;
  v_must_def_optype v_must_def_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_v_must_defs);
  if (num == 0)
    return NULL;

  /* There should only be a single V_MUST_DEF per assignment.  */
  gcc_assert (TREE_CODE (stmt) != MODIFY_EXPR || num <= 1);

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  build_diff = true;
  if (old_ops && old_ops->num_v_must_defs == num)
    {
      old_num = num;
      build_diff = false;
      for (x = 0; x < num; x++)
	{
	  tree var = old_ops->v_must_defs[x].def;
	  if (TREE_CODE (var) == SSA_NAME)
	    var = SSA_NAME_VAR (var);
	  if (var != VARRAY_TREE (build_v_must_defs, x))
	    {
	      build_diff = true;
	      break;
	    }
	}
    }
  else
    old_num = (old_ops ? old_ops->num_v_must_defs : 0);

  if (!build_diff)
    {
      v_must_def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      v_must_def_ops = allocate_v_must_def_optype (num);
      for (x = 0; x < num ; x++)
	{
	  tree result, var = VARRAY_TREE (build_v_must_defs, x);
	  /* Look for VAR in the original vector.  */
	  for (i = 0; i < old_num; i++)
	    {
	      result = old_ops->v_must_defs[i].def;
	      if (TREE_CODE (result) == SSA_NAME)
		result = SSA_NAME_VAR (result);
	      if (result == var)
		{
		  v_must_def_ops->v_must_defs[x].def
		    = old_ops->v_must_defs[i].def;
		  v_must_def_ops->v_must_defs[x].use
		    = old_ops->v_must_defs[i].use;
		  break;
		}
	    }
	  if (i == old_num)
	    {
	      v_must_def_ops->v_must_defs[x].def = var;
	      v_must_def_ops->v_must_defs[x].use = var;
	    }
	}
    }

  VARRAY_POP_ALL (build_v_must_defs);

  return v_must_def_ops;
}
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (tree stmt, stmt_operands_p old_ops,
			    stmt_operands_p new_ops)
{
  new_ops->def_ops = finalize_ssa_defs (&(old_ops->def_ops), stmt);
  new_ops->use_ops = finalize_ssa_uses (&(old_ops->use_ops), stmt);
  new_ops->v_must_def_ops
    = finalize_ssa_v_must_defs (&(old_ops->v_must_def_ops), stmt);
  new_ops->v_may_def_ops = finalize_ssa_v_may_defs (&(old_ops->v_may_def_ops));
  new_ops->vuse_ops = finalize_ssa_vuses (&(old_ops->vuse_ops));
}


/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VARRAY_ACTIVE_SIZE (build_defs) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_uses) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_vuses) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_v_may_defs) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_v_must_defs) == 0);
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VARRAY_PUSH_TREE_PTR (build_defs, def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VARRAY_PUSH_TREE_PTR (build_uses, use_p);
}


/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
    if (var == VARRAY_TREE (build_v_may_defs, i))
      return;

  VARRAY_PUSH_TREE (build_v_may_defs, var);
}


/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  size_t i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
    if (var == VARRAY_TREE (build_vuses, i))
      return;

  VARRAY_PUSH_TREE (build_vuses, var);
}


/* Add VAR to the list of virtual must definitions for INFO.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_must_defs); i++)
    if (var == VARRAY_TREE (build_v_must_defs, i))
      return;

  VARRAY_PUSH_TREE (build_v_must_defs, var);
}
/* Create an operands cache for STMT, returning it in NEW_OPS.  OLD_OPS are
   the original operands, and if ANN is non-null, appropriate stmt flags are
   set in the stmt's annotation.  Note that some fields in old_ops may
   change to NULL, although none of the memory they originally pointed to
   will be destroyed.  It is appropriate to call free_ssa_operands() on
   the value returned in old_ops.

   The rationale for this: certain optimizations wish to examine the
   difference between new_ops and old_ops after processing.  If a set of
   operands doesn't change, new_ops will simply assume the pointer in
   old_ops, and the old_ops pointer will be set to NULL, indicating no
   memory needs to be cleared.  Usage might appear something like:

       old_ops_copy = old_ops = stmt_ann(stmt)->operands;
       build_ssa_operands (stmt, NULL, &old_ops, &new_ops);
	  <* compare old_ops_copy and new_ops *>
       free_ssa_operands (old_ops);				*/
void
build_ssa_operands (tree stmt, stmt_ann_t ann, stmt_operands_p old_ops,
		    stmt_operands_p new_ops)
{
  enum tree_code code;
  tree_ann_t saved_ann = stmt->common.ann;

  /* Replace stmt's annotation with the one passed in for the duration
     of the operand building process.  This allows "fake" stmts to be built
     and not be included in other data structures which can be built here.  */
  stmt->common.ann = (tree_ann_t) ann;

  /* Initially assume that the statement has no volatile operands, nor
     makes aliased loads or stores.  */
  if (ann)
    {
      ann->has_volatile_ops = false;
      ann->makes_aliased_stores = false;
      ann->makes_aliased_loads = false;
    }

  start_ssa_stmt_operands ();

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_RANGE_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == REALPART_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == IMAGPART_EXPR
	  /* Use a V_MAY_DEF if the RHS might throw, as the LHS won't be
	     modified in that case.  FIXME we should represent somehow
	     that it is killed on the fallthrough path.  */
	  || tree_could_throw_p (TREE_OPERAND (stmt, 1)))
	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_is_def);
      else
	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
			   opf_is_def | opf_kill_def);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

    /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
	 pointer (which may only happen for USE operands), we will abort in
	 append_use.  This default will handle statements like empty
	 statements, or CALL_EXPRs that may appear on the RHS of a statement
	 or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }

  finalize_ssa_stmt_operands (stmt, old_ops, new_ops);
  stmt->common.ann = saved_ann;
}
/* Free any operands vectors in OPS.  */

static void
free_ssa_operands (stmt_operands_p ops)
{
  if (ops->def_ops)
    free_defs (&(ops->def_ops));
  if (ops->use_ops)
    free_uses (&(ops->use_ops));
  if (ops->vuse_ops)
    free_vuses (&(ops->vuse_ops));
  if (ops->v_may_def_ops)
    free_v_may_defs (&(ops->v_may_def_ops));
  if (ops->v_must_def_ops)
    free_v_must_defs (&(ops->v_must_def_ops));
}
/* Get the operands of statement STMT.  Note that repeated calls to
   get_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to modify_stmt().  */

void
get_stmt_operands (tree stmt)
{
  stmt_ann_t ann;
  stmt_operands_t old_operands;

  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  /* Ignore error statements.  */
  if (TREE_CODE (stmt) == ERROR_MARK)
    return;

  ann = get_stmt_ann (stmt);

  /* If the statement has not been modified, the operands are still valid.  */
  if (!ann->modified)
    return;

  timevar_push (TV_TREE_OPS);

  old_operands = ann->operands;
  memset (&(ann->operands), 0, sizeof (stmt_operands_t));

  build_ssa_operands (stmt, ann, &old_operands, &(ann->operands));
  free_ssa_operands (&old_operands);

  /* Clear the modified bit for STMT.  Subsequent calls to
     get_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to modify_stmt().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
/* Recursively scan the expression pointed to by EXPR_P in statement referred
   to by INFO.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */
static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;

  if (expr == NULL || expr == error_mark_node)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* We could have the address of a component, array member,
	 etc which has interesting variable references.  */
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the stmt takes its address will
	 be of interest to some passes (e.g. alias resolution).  */
      add_stmt_operand (expr_p, stmt, 0);

      /* If the address is invariant, there may be no interesting variable
	 references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* There should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find here
	 are ARRAY_REF indices which will always be real operands (GIMPLE
	 does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case CONST_DECL:
      /* If we found a variable, add it to DEFS or USES depending
	 on the operand flags.  */
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
	 representing the array.  The virtual variable for an ARRAY_REF
	 is the VAR_DECL for the array.  */

      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
	 according to the value of IS_DEF.  Recurse if the LHS of the
	 ARRAY_REF node is not a regular variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, stmt, flags);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
      return;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Similarly to arrays, references to compound variables (complex
	 types and structures/unions) are globbed.

	 FIXME: This means that

			a.x = 6;
			a.y = 7;
			foo (a.x, a.y);

	 will not be constant propagated because the two partial
	 definitions to 'a' will kill each other.  Note that SRA may be
	 able to fix this problem if 'a' can be scalarized.  */

      /* If the LHS of the compound reference is not a regular variable,
	 recurse to keep looking for more operands in the subexpression.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, stmt, flags);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      if (code == COMPONENT_REF)
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;
    case MODIFY_EXPR:
      {
	int subflags;
	tree op;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

	op = TREE_OPERAND (expr, 0);
	if (TREE_CODE (op) == WITH_SIZE_EXPR)
	  op = TREE_OPERAND (op, 0);
	if (TREE_CODE (op) == ARRAY_REF
	    || TREE_CODE (op) == ARRAY_RANGE_REF
	    || TREE_CODE (op) == COMPONENT_REF
	    || TREE_CODE (op) == REALPART_EXPR
	    || TREE_CODE (op) == IMAGPART_EXPR)
	  subflags = opf_is_def;
	else
	  subflags = opf_is_def | opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
	return;
      }
    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */

	tree t;
	for (t = TREE_OPERAND (expr, 0); t ; t = TREE_CHAIN (t))
	  get_expr_operands (stmt, &TREE_VALUE (t), opf_none);

	return;
      }

    case TRUTH_NOT_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    do_binary:
      {
	tree op0 = TREE_OPERAND (expr, 0);
	tree op1 = TREE_OPERAND (expr, 1);

	/* If it would be profitable to swap the operands, then do so to
	   canonicalize the statement, enabling better optimization.

	   By placing canonicalization of such expressions here we
	   transparently keep statements in canonical form, even
	   when the statement is modified.  */
	if (tree_swap_operands_p (op0, op1, false))
	  {
	    /* For relationals we need to swap the operands
	       and change the code.  */
	    if (code == LT_EXPR
		|| code == GT_EXPR
		|| code == LE_EXPR
		|| code == GE_EXPR)
	      {
		TREE_SET_CODE (expr, swap_tree_comparison (code));
		TREE_OPERAND (expr, 0) = op1;
		TREE_OPERAND (expr, 1) = op0;
	      }
	    /* For a commutative operator we can just swap the operands.  */
	    else if (commutative_tree_code (code))
	      {
		TREE_OPERAND (expr, 0) = op1;
		TREE_OPERAND (expr, 1) = op0;
	      }
	  }

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
	goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
	goto do_binary;
      if (class == tcc_constant || class == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  internal_error ("internal error");
#endif
  gcc_unreachable ();
}
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
			       &allows_mem, &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }


  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	size_t i;
	bitmap_iterator bi;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, stmt, opf_is_def);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, stmt, opf_is_def);
	    }

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
	  {
	    tree var = referenced_var (i);
	    add_stmt_operand (&var, stmt, opf_is_def);
	  }

	break;
      }
}
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (REF_ORIGINAL (expr))
    {
      enum tree_code ocode = TREE_CODE (REF_ORIGINAL (expr));

      /* If we originally accessed part of a structure, we do it still.  */
      if (ocode == ARRAY_REF
	  || ocode == COMPONENT_REF
	  || ocode == REALPART_EXPR
	  || ocode == IMAGPART_EXPR)
	flags &= ~opf_kill_def;
    }

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_stmt_operand (&pi->name_mem_tag, stmt, flags);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its type memory tag.  */
	  var_ann_t ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  ann = var_ann (ptr);
	  if (ann->type_mem_tag)
	    add_stmt_operand (&ann->type_mem_tag, stmt, flags);
	}
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      if (ann)
	ann->has_volatile_ops = true;
      return;
    }

  /* Everything else *should* have been folded elsewhere, but users
     are smarter than we in finding ways to write invalid code.  We
     cannot just abort here.  If we were absolutely certain that we
     do handle all valid cases, then we could just do nothing here.
     That seems optimistic, so attempt to do something logical...  */
  else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
	   && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
    {
      /* Make sure we know the object is addressable.  */
      pptr = &TREE_OPERAND (ptr, 0);
      add_stmt_operand (pptr, stmt, 0);

      /* Mark the object itself with a VUSE.  */
      pptr = &TREE_OPERAND (*pptr, 0);
      get_expr_operands (stmt, pptr, flags);
      return;
    }

  /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
  else
    gcc_unreachable ();

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none);
}
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);

  if (!bitmap_empty_p (call_clobbered_vars))
    {
      /* 'pure' and 'const' functions never call-clobber anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt);
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt);
    }
}
/* Add *VAR_P to the appropriate operand array for INFO.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */
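
/* As an informal illustration of the routine below: for "x_1 = y_2 + 1"
   with scalar 'x' and 'y', both operands are GIMPLE registers and land in
   the real DEF/USE vectors.  For "s.f = y_2" where 's' is an aliased
   structure, the operand handed in is the COMPONENT_REF, which is not a
   GIMPLE register; its base symbol 's' (and each of its may-aliases) is
   therefore recorded as a virtual operand (a V_MAY_DEF) instead.  */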
static void
add_stmt_operand (tree *var_p, tree stmt, int flags)
{
  bool is_real_op;
  tree var, sym;
  stmt_ann_t s_ann = stmt_ann (stmt);
  var_ann_t v_ann;

  var = *var_p;
  STRIP_NOPS (var);

  /* If the operand is an ADDR_EXPR, add its operand to the list of
     variables that have had their address taken in this statement.  */
  if (TREE_CODE (var) == ADDR_EXPR)
    {
      note_addressable (TREE_OPERAND (var, 0), s_ann);
      return;
    }

  /* If the original variable is not a scalar, it will be added to the list
     of virtual operands.  In that case, use its base symbol as the virtual
     variable representing it.  */
  is_real_op = is_gimple_reg (var);
  if (!is_real_op && !DECL_P (var))
    var = get_virtual_var (var);

  /* If VAR is not a variable that we care to optimize, do nothing.  */
  if (var == NULL_TREE || !SSA_VAR_P (var))
    return;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Don't expose volatile variables to the optimizers.  */
  if (TREE_THIS_VOLATILE (sym))
    {
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    {
      varray_type aliases;

      /* The variable is not a GIMPLE register.  Add it (or its aliases) to
	 virtual operands, unless the caller has specifically requested
	 not to add virtual operands (used when adding operands inside an
	 ADDR_EXPR expression).  */
      if (flags & opf_no_vops)
	return;

      aliases = v_ann->may_aliases;

      if (aliases == NULL)
	{
	  /* The variable is not aliased or it is an alias tag.  */
	  if (flags & opf_is_def)
	    {
	      if (flags & opf_kill_def)
		{
		  /* Only regular variables may get a V_MUST_DEF
		     operand.  */
		  gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG);
		  /* V_MUST_DEF for non-aliased, non-GIMPLE register
		     variable definitions.  */
		  append_v_must_def (var);
		}
	      else
		{
		  /* Add a V_MAY_DEF for call-clobbered variables and
		     memory tags.  */
		  append_v_may_def (var);
		}
	    }
	  else
	    {
	      append_vuse (var);
	      if (s_ann && v_ann->is_alias_tag)
		s_ann->makes_aliased_loads = 1;
	    }
	}
      else
	{
	  size_t i;

	  /* The variable is aliased.  Add its aliases to the virtual
	     operands.  */
	  gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);

	  if (flags & opf_is_def)
	    {
	      /* If the variable is also an alias tag, add a virtual
		 operand for it, otherwise we will miss representing
		 references to the members of the variable's alias set.
		 This fixes the bug in gcc.c-torture/execute/20020503-1.c.  */
	      if (v_ann->is_alias_tag)
		append_v_may_def (var);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_v_may_def (VARRAY_TREE (aliases, i));

	      if (s_ann)
		s_ann->makes_aliased_stores = 1;
	    }
	  else
	    {
	      /* Similarly, append a virtual use for VAR itself, when
		 it is an alias tag.  */
	      if (v_ann->is_alias_tag)
		append_vuse (var);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_vuse (VARRAY_TREE (aliases, i));

	      if (s_ann)
		s_ann->makes_aliased_loads = 1;
	    }
	}
    }
}
/* Record that VAR had its address taken in the statement with annotations
   S_ANN.  */

static void
note_addressable (tree var, stmt_ann_t s_ann)
{
  if (!s_ann)
    return;

  var = get_base_address (var);
  if (var && SSA_VAR_P (var))
    {
      if (s_ann->addresses_taken == NULL)
	s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
      bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
    }
}
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt)
{
  /* Functions that are not const, pure, or noreturn may clobber
     call-clobbered variables.  */
  if (stmt_ann (stmt))
    stmt_ann (stmt)->makes_clobbering_call = true;

  /* If we had created .GLOBAL_VAR earlier, use it.  Otherwise, add
     a V_MAY_DEF operand for every call clobbered variable.  See
     compute_may_aliases for the heuristic used to decide whether
     to create .GLOBAL_VAR or not.  */
  if (global_var)
    add_stmt_operand (&global_var, stmt, opf_is_def);
  else
    {
      size_t i;
      bitmap_iterator bi;

      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	{
	  tree var = referenced_var (i);
	  if (TREE_READONLY (var)
	      && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
	    add_stmt_operand (&var, stmt, opf_none);
	  else
	    add_stmt_operand (&var, stmt, opf_is_def);
	}
    }
}
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt)
{
  bitmap_iterator bi;

  /* If the function is not pure, it may reference memory.  Add a VUSE
     for .GLOBAL_VAR if it has been created.  Otherwise, add a VUSE
     for each call-clobbered variable.  See add_referenced_var for the
     heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    add_stmt_operand (&global_var, stmt, opf_none);
  else
    {
      size_t i;

      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	{
	  tree var = referenced_var (i);
	  add_stmt_operand (&var, stmt, opf_none);
	}
    }
}
/* Copies virtual operands from SRC to DST.  */

void
copy_virtual_operands (tree dst, tree src)
{
  unsigned i;
  vuse_optype vuses = STMT_VUSE_OPS (src);
  v_may_def_optype v_may_defs = STMT_V_MAY_DEF_OPS (src);
  v_must_def_optype v_must_defs = STMT_V_MUST_DEF_OPS (src);
  vuse_optype *vuses_new = &stmt_ann (dst)->operands.vuse_ops;
  v_may_def_optype *v_may_defs_new = &stmt_ann (dst)->operands.v_may_def_ops;
  v_must_def_optype *v_must_defs_new
    = &stmt_ann (dst)->operands.v_must_def_ops;

  if (vuses)
    {
      *vuses_new = allocate_vuse_optype (NUM_VUSES (vuses));
      for (i = 0; i < NUM_VUSES (vuses); i++)
	SET_VUSE_OP (*vuses_new, i, VUSE_OP (vuses, i));
    }

  if (v_may_defs)
    {
      *v_may_defs_new
	= allocate_v_may_def_optype (NUM_V_MAY_DEFS (v_may_defs));
      for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
	{
	  SET_V_MAY_DEF_OP (*v_may_defs_new, i, V_MAY_DEF_OP (v_may_defs, i));
	  SET_V_MAY_DEF_RESULT (*v_may_defs_new, i,
				V_MAY_DEF_RESULT (v_may_defs, i));
	}
    }

  if (v_must_defs)
    {
      *v_must_defs_new
	= allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs));
      for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
	{
	  SET_V_MUST_DEF_RESULT (*v_must_defs_new, i,
				 V_MUST_DEF_RESULT (v_must_defs, i));
	  SET_V_MUST_DEF_KILL (*v_must_defs_new, i,
			       V_MUST_DEF_KILL (v_must_defs, i));
	}
    }
}
/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this
   can be used to eliminate redundant loads.  OLD_OPS are the operands from
   the store stmt, and NEW_STMT is the new load which represents a load of
   the values stored.  */
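
/* For instance (informally), given the store

	# a_4 = V_MAY_DEF <a_3>
	*p_1 = x_2;

   the artificial load built below for "*p_1" carries a VUSE <a_4>, i.e. a
   use of exactly the SSA versions the store produced, which is what lets
   DOM match a later load of "*p_1" against the stored value.  */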
void
create_ssa_artficial_load_stmt (stmt_operands_p old_ops, tree new_stmt)
{
  stmt_ann_t ann;
  tree op;
  stmt_operands_t tmp;
  unsigned j;

  memset (&tmp, 0, sizeof (stmt_operands_t));
  ann = get_stmt_ann (new_stmt);

  /* Free operands just in case it was an existing stmt.  */
  free_ssa_operands (&(ann->operands));

  build_ssa_operands (new_stmt, NULL, &tmp, &(ann->operands));
  free_vuses (&(ann->operands.vuse_ops));
  free_v_may_defs (&(ann->operands.v_may_def_ops));
  free_v_must_defs (&(ann->operands.v_must_def_ops));

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  for (j = 0; j < NUM_V_MAY_DEFS (old_ops->v_may_def_ops); j++)
    {
      op = V_MAY_DEF_RESULT (old_ops->v_may_def_ops, j);
      append_vuse (op);
    }

  for (j = 0; j < NUM_V_MUST_DEFS (old_ops->v_must_def_ops); j++)
    {
      op = V_MUST_DEF_RESULT (old_ops->v_must_def_ops, j);
      append_vuse (op);
    }

  /* Now set the vuses for this new stmt.  */
  ann->operands.vuse_ops = finalize_ssa_vuses (&(tmp.vuse_ops));
}

#include "gt-tree-ssa-operands.h"