/* SSA operands management for trees.
   Copyright (C) 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "errors.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"

#include "langhooks.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   get_stmt_operands() is the primary entry point.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the new operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
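/* Illustrative sketch only (not part of this file's code): a pass typically
   consumes the cache via the accessor macros from tree-flow.h and
   tree-ssa-operands.h.  STMT_VUSE_OPS and friends are used later in this
   file; STMT_USE_OPS, NUM_USES and USE_OP are assumed to be the analogous
   accessors for real uses:

       use_optype uses;
       unsigned i;

       get_stmt_operands (stmt);
       uses = STMT_USE_OPS (stmt);
       for (i = 0; i < NUM_USES (uses); i++)
         process (USE_OP (uses, i));

   where 'process' stands in for whatever the pass does with each use.  */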
/* Flags to describe operand properties in get_stmt_operands and helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def	(1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 2)
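/* For example (illustrative only): for a statement 'a = b + 1' with a
   non-aliased scalar LHS, the LHS is scanned with
   (opf_is_def | opf_kill_def), yielding a killing definition, while a
   partial store such as 'a.x = b' uses plain opf_is_def; see the
   MODIFY_EXPR case in build_ssa_operands below.  */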
/* Array for building all the def operands.  */
static GTY (()) varray_type build_defs;

/* Array for building all the use operands.  */
static GTY (()) varray_type build_uses;

/* Array for building all the v_may_def operands.  */
static GTY (()) varray_type build_v_may_defs;

/* Array for building all the vuse operands.  */
static GTY (()) varray_type build_vuses;

/* Array for building all the v_must_def operands.  */
static GTY (()) varray_type build_v_must_defs;

/* True if the operands for call clobbered vars are cached and valid.  */
bool ssa_call_clobbered_cache_valid;
bool ssa_ro_call_cache_valid;

/* These arrays are the cached operand vectors for call clobbered calls.  */
static GTY (()) varray_type clobbered_v_may_defs;
static GTY (()) varray_type clobbered_vuses;
static GTY (()) varray_type ro_call_vuses;
static bool clobbered_aliased_loads;
static bool clobbered_aliased_stores;
static bool ro_call_aliased_loads;

#ifdef ENABLE_CHECKING
/* Used to make sure operand construction is working on the proper stmt.  */
tree check_build_stmt;
#endif

def_operand_p NULL_DEF_OPERAND_P = { NULL };
use_operand_p NULL_USE_OPERAND_P = { NULL };
static void note_addressable (tree, stmt_ann_t);
static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
static void append_v_may_def (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree);
static void add_call_read_ops (tree);
static void add_stmt_operand (tree *, stmt_ann_t, int);
/* Return a vector of contiguous memory for NUM def operands.  */

static inline def_optype
allocate_def_optype (unsigned num)
{
  def_optype def_ops;
  unsigned size;
  size = sizeof (struct def_optype_d) + sizeof (tree *) * (num - 1);
  def_ops = ggc_alloc (size);
  def_ops->num_defs = num;
  return def_ops;
}


/* Return a vector of contiguous memory for NUM use operands.  */

static inline use_optype
allocate_use_optype (unsigned num)
{
  use_optype use_ops;
  unsigned size;
  size = sizeof (struct use_optype_d) + sizeof (tree *) * (num - 1);
  use_ops = ggc_alloc (size);
  use_ops->num_uses = num;
  return use_ops;
}


/* Return a vector of contiguous memory for NUM v_may_def operands.  */

static inline v_may_def_optype
allocate_v_may_def_optype (unsigned num)
{
  v_may_def_optype v_may_def_ops;
  unsigned size;
  size = sizeof (struct v_may_def_optype_d)
	 + sizeof (v_def_use_operand_type_t) * (num - 1);
  v_may_def_ops = ggc_alloc (size);
  v_may_def_ops->num_v_may_defs = num;
  return v_may_def_ops;
}


/* Return a vector of contiguous memory for NUM v_use operands.  */

static inline vuse_optype
allocate_vuse_optype (unsigned num)
{
  vuse_optype vuse_ops;
  unsigned size;
  size = sizeof (struct vuse_optype_d) + sizeof (tree) * (num - 1);
  vuse_ops = ggc_alloc (size);
  vuse_ops->num_vuses = num;
  return vuse_ops;
}


/* Return a vector of contiguous memory for NUM v_must_def operands.  */

static inline v_must_def_optype
allocate_v_must_def_optype (unsigned num)
{
  v_must_def_optype v_must_def_ops;
  unsigned size;
  size = sizeof (struct v_must_def_optype_d)
	 + sizeof (v_def_use_operand_type_t) * (num - 1);
  v_must_def_ops = ggc_alloc (size);
  v_must_def_ops->num_v_must_defs = num;
  return v_must_def_ops;
}
/* Free memory for USES.  */

static inline void
free_uses (use_optype *uses)
{
  if (*uses)
    {
      ggc_free (*uses);
      *uses = NULL;
    }
}


/* Free memory for DEFS.  */

static inline void
free_defs (def_optype *defs)
{
  if (*defs)
    {
      ggc_free (*defs);
      *defs = NULL;
    }
}


/* Free memory for VUSES.  */

static inline void
free_vuses (vuse_optype *vuses)
{
  if (*vuses)
    {
      ggc_free (*vuses);
      *vuses = NULL;
    }
}


/* Free memory for V_MAY_DEFS.  */

static inline void
free_v_may_defs (v_may_def_optype *v_may_defs)
{
  if (*v_may_defs)
    {
      ggc_free (*v_may_defs);
      *v_may_defs = NULL;
    }
}


/* Free memory for V_MUST_DEFS.  */

static inline void
free_v_must_defs (v_must_def_optype *v_must_defs)
{
  if (*v_must_defs)
    {
      ggc_free (*v_must_defs);
      *v_must_defs = NULL;
    }
}
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  VARRAY_TREE_PTR_INIT (build_defs, 5, "build defs");
  VARRAY_TREE_PTR_INIT (build_uses, 10, "build uses");
  VARRAY_TREE_INIT (build_v_may_defs, 10, "build v_may_defs");
  VARRAY_TREE_INIT (build_vuses, 10, "build vuses");
  VARRAY_TREE_INIT (build_v_must_defs, 10, "build v_must_defs");
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  ggc_free (build_defs);
  ggc_free (build_uses);
  ggc_free (build_v_may_defs);
  ggc_free (build_vuses);
  ggc_free (build_v_must_defs);
  build_defs = NULL;
  build_uses = NULL;
  build_v_may_defs = NULL;
  build_vuses = NULL;
  build_v_must_defs = NULL;
  if (clobbered_v_may_defs)
    {
      ggc_free (clobbered_v_may_defs);
      ggc_free (clobbered_vuses);
      clobbered_v_may_defs = NULL;
      clobbered_vuses = NULL;
    }
  if (ro_call_vuses)
    {
      ggc_free (ro_call_vuses);
      ro_call_vuses = NULL;
    }
}
/* All the finalize_ssa_* routines do the work required to turn the build_
   VARRAY into an operand_vector of the appropriate type.  The original
   vector, if any, is passed in for comparison and virtual SSA_NAME reuse.
   If the old vector is reused, the pointer passed in is set to NULL so that
   the memory is not freed when the old operands are freed.  */
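/* For example (illustrative only):

       def_optype old_v = ann->operands.def_ops;
       def_optype new_v = finalize_ssa_defs (&old_v, stmt);

   leaves either new_v equal to the previous vector with old_v set to NULL
   (the vector was reused), or new_v freshly allocated with old_v still
   pointing at the previous vector, which the caller is then expected to
   free.  */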
/* Return a new def operand vector for STMT, comparing to OLD_OPS_P.  */

static def_optype
finalize_ssa_defs (def_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x;
  def_optype def_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_defs);
  if (num == 0)
    return NULL;

  /* There should only be a single real definition per assignment.  */
  gcc_assert (TREE_CODE (stmt) != MODIFY_EXPR || num <= 1);

  old_ops = *old_ops_p;

  /* Compare old vector and new array.  */
  build_diff = true;
  if (old_ops && old_ops->num_defs == num)
    {
      build_diff = false;
      for (x = 0; x < num; x++)
	if (old_ops->defs[x].def != VARRAY_TREE_PTR (build_defs, x))
	  {
	    build_diff = true;
	    break;
	  }
    }

  if (!build_diff)
    {
      def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      def_ops = allocate_def_optype (num);
      for (x = 0; x < num; x++)
	def_ops->defs[x].def = VARRAY_TREE_PTR (build_defs, x);
    }

  VARRAY_POP_ALL (build_defs);

  return def_ops;
}
/* Return a new use operand vector for STMT, comparing to OLD_OPS_P.  */

static use_optype
finalize_ssa_uses (use_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x;
  use_optype use_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_uses);
  if (num == 0)
    return NULL;

#ifdef ENABLE_CHECKING
  {
    unsigned x;
    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to get_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*(VARRAY_TREE_PTR (build_uses, x)) != stmt);
  }
#endif
  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  build_diff = true;
  if (old_ops && old_ops->num_uses == num)
    {
      build_diff = false;
      for (x = 0; x < num; x++)
	if (old_ops->uses[x].use != VARRAY_TREE_PTR (build_uses, x))
	  {
	    build_diff = true;
	    break;
	  }
    }

  if (!build_diff)
    {
      use_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      use_ops = allocate_use_optype (num);
      for (x = 0; x < num; x++)
	use_ops->uses[x].use = VARRAY_TREE_PTR (build_uses, x);
    }
  VARRAY_POP_ALL (build_uses);

  return use_ops;
}
/* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P.  */

static v_may_def_optype
finalize_ssa_v_may_defs (v_may_def_optype *old_ops_p)
{
  unsigned num, x, i, old_num;
  v_may_def_optype v_may_def_ops, old_ops;
  tree result, var;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
  if (num == 0)
    return NULL;

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  build_diff = true;
  if (old_ops && old_ops->num_v_may_defs == num)
    {
      old_num = num;
      build_diff = false;
      for (x = 0; x < num; x++)
	{
	  var = old_ops->v_may_defs[x].def;
	  if (TREE_CODE (var) == SSA_NAME)
	    var = SSA_NAME_VAR (var);
	  if (var != VARRAY_TREE (build_v_may_defs, x))
	    {
	      build_diff = true;
	      break;
	    }
	}
    }
  else
    old_num = (old_ops ? old_ops->num_v_may_defs : 0);

  if (!build_diff)
    {
      v_may_def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      v_may_def_ops = allocate_v_may_def_optype (num);
      for (x = 0; x < num; x++)
	{
	  var = VARRAY_TREE (build_v_may_defs, x);
	  /* Look for VAR in the old operands vector.  */
	  for (i = 0; i < old_num; i++)
	    {
	      result = old_ops->v_may_defs[i].def;
	      if (TREE_CODE (result) == SSA_NAME)
		result = SSA_NAME_VAR (result);
	      if (result == var)
		{
		  v_may_def_ops->v_may_defs[x] = old_ops->v_may_defs[i];
		  break;
		}
	    }
	  if (i == old_num)
	    {
	      v_may_def_ops->v_may_defs[x].def = var;
	      v_may_def_ops->v_may_defs[x].use = var;
	    }
	}
    }

  /* The V_MAY_DEF build vector is emptied after VUSES have been
     processed; see cleanup_v_may_defs.  */

  return v_may_def_ops;
}
/* Clear the in_list bits and empty the build array for v_may_defs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = VARRAY_ACTIVE_SIZE (build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = VARRAY_TREE (build_v_may_defs, x);
      var_ann_t ann = var_ann (t);
      ann->in_v_may_def_list = 0;
    }
  VARRAY_POP_ALL (build_v_may_defs);
}
/* Return a new vuse operand vector, comparing to OLD_OPS_P.  */

static vuse_optype
finalize_ssa_vuses (vuse_optype *old_ops_p)
{
  unsigned num, x, i, num_v_may_defs, old_num;
  vuse_optype vuse_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  if (num == 0)
    {
      cleanup_v_may_defs ();
      return NULL;
    }

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  num_v_may_defs = VARRAY_ACTIVE_SIZE (build_v_may_defs);

  if (num_v_may_defs > 0)
    {
      size_t i;
      tree vuse;
      for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
	{
	  vuse = VARRAY_TREE (build_vuses, i);
	  if (TREE_CODE (vuse) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (vuse);
	      ann->in_vuse_list = 0;
	      if (ann->in_v_may_def_list)
		{
		  /* If we found a useless VUSE operand, remove it from the
		     operand array by replacing it with the last active
		     element in the operand array (unless the useless VUSE
		     was the last operand, in which case we simply remove
		     it).  */
		  if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
		    {
		      VARRAY_TREE (build_vuses, i)
			= VARRAY_TREE (build_vuses,
				       VARRAY_ACTIVE_SIZE (build_vuses) - 1);
		    }
		  VARRAY_POP (build_vuses);

		  /* We want to rescan the element at this index, unless
		     this was the last element, in which case the loop
		     terminates.  */
		  i--;
		}
	    }
	}
    }
  else
    /* Clear out the in_list bits.  */
    for (x = 0; x < num; x++)
      {
	tree t = VARRAY_TREE (build_vuses, x);
	if (TREE_CODE (t) != SSA_NAME)
	  {
	    var_ann_t ann = var_ann (t);
	    ann->in_vuse_list = 0;
	  }
      }

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  /* We could have reduced the size to zero now, however.  */
  if (num == 0)
    {
      cleanup_v_may_defs ();
      return NULL;
    }

  old_ops = *old_ops_p;

  /* Determine whether the vuses build vector is the same as the old
     vector.  */
  build_diff = true;
  if (old_ops && old_ops->num_vuses == num)
    {
      old_num = num;
      build_diff = false;
      for (x = 0; x < num; x++)
	{
	  tree v;
	  v = old_ops->vuses[x];
	  if (TREE_CODE (v) == SSA_NAME)
	    v = SSA_NAME_VAR (v);
	  if (v != VARRAY_TREE (build_vuses, x))
	    {
	      build_diff = true;
	      break;
	    }
	}
    }
  else
    old_num = (old_ops ? old_ops->num_vuses : 0);

  if (!build_diff)
    {
      vuse_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      vuse_ops = allocate_vuse_optype (num);
      for (x = 0; x < num; x++)
	{
	  tree result, var = VARRAY_TREE (build_vuses, x);
	  /* Look for VAR in the old vector, and use that SSA_NAME.  */
	  for (i = 0; i < old_num; i++)
	    {
	      result = old_ops->vuses[i];
	      if (TREE_CODE (result) == SSA_NAME)
		result = SSA_NAME_VAR (result);
	      if (result == var)
		{
		  vuse_ops->vuses[x] = old_ops->vuses[i];
		  break;
		}
	    }
	  if (i == old_num)
	    vuse_ops->vuses[x] = var;
	}
    }

  /* The v_may_def build vector wasn't freed because we needed it here.
     Free it now with the vuses build vector.  */
  VARRAY_POP_ALL (build_vuses);
  cleanup_v_may_defs ();

  return vuse_ops;
}
/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P.  */

static v_must_def_optype
finalize_ssa_v_must_defs (v_must_def_optype *old_ops_p,
			  tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x, i, old_num = 0;
  v_must_def_optype v_must_def_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_v_must_defs);
  if (num == 0)
    return NULL;

  /* There should only be a single V_MUST_DEF per assignment.  */
  gcc_assert (TREE_CODE (stmt) != MODIFY_EXPR || num <= 1);

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  build_diff = true;
  if (old_ops && old_ops->num_v_must_defs == num)
    {
      old_num = num;
      build_diff = false;
      for (x = 0; x < num; x++)
	{
	  tree var = old_ops->v_must_defs[x].def;
	  if (TREE_CODE (var) == SSA_NAME)
	    var = SSA_NAME_VAR (var);
	  if (var != VARRAY_TREE (build_v_must_defs, x))
	    {
	      build_diff = true;
	      break;
	    }
	}
    }
  else
    old_num = (old_ops ? old_ops->num_v_must_defs : 0);

  if (!build_diff)
    {
      v_must_def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      v_must_def_ops = allocate_v_must_def_optype (num);
      for (x = 0; x < num; x++)
	{
	  tree result, var = VARRAY_TREE (build_v_must_defs, x);
	  /* Look for VAR in the original vector.  */
	  for (i = 0; i < old_num; i++)
	    {
	      result = old_ops->v_must_defs[i].def;
	      if (TREE_CODE (result) == SSA_NAME)
		result = SSA_NAME_VAR (result);
	      if (result == var)
		{
		  v_must_def_ops->v_must_defs[x].def
		    = old_ops->v_must_defs[i].def;
		  v_must_def_ops->v_must_defs[x].use
		    = old_ops->v_must_defs[i].use;
		  break;
		}
	    }
	  if (i == old_num)
	    {
	      v_must_def_ops->v_must_defs[x].def = var;
	      v_must_def_ops->v_must_defs[x].use = var;
	    }
	}
    }
  VARRAY_POP_ALL (build_v_must_defs);

  return v_must_def_ops;
}
/* Finalize all the build vectors, filling the new operand vectors into
   NEW_OPS.  */

static inline void
finalize_ssa_stmt_operands (tree stmt, stmt_operands_p old_ops,
			    stmt_operands_p new_ops)
{
  new_ops->def_ops = finalize_ssa_defs (&(old_ops->def_ops), stmt);
  new_ops->use_ops = finalize_ssa_uses (&(old_ops->use_ops), stmt);
  new_ops->v_must_def_ops
    = finalize_ssa_v_must_defs (&(old_ops->v_must_def_ops), stmt);
  new_ops->v_may_def_ops = finalize_ssa_v_may_defs (&(old_ops->v_may_def_ops));
  new_ops->vuse_ops = finalize_ssa_vuses (&(old_ops->vuse_ops));
}


/* Start the process of building up operands vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VARRAY_ACTIVE_SIZE (build_defs) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_uses) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_vuses) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_v_may_defs) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_v_must_defs) == 0);
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VARRAY_PUSH_TREE_PTR (build_defs, def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VARRAY_PUSH_TREE_PTR (build_uses, use_p);
}


/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  var_ann_t ann = get_var_ann (var);

  /* Don't allow duplicate entries.  */
  if (ann->in_v_may_def_list)
    return;
  ann->in_v_may_def_list = 1;

  VARRAY_PUSH_TREE (build_v_may_defs, var);
}


/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
	return;
      ann->in_vuse_list = 1;
    }

  VARRAY_PUSH_TREE (build_vuses, var);
}


/* Add VAR to the list of virtual must definitions.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_must_defs); i++)
    if (var == VARRAY_TREE (build_v_must_defs, i))
      return;

  VARRAY_PUSH_TREE (build_v_must_defs, var);
}
/* Create an operands cache for STMT, returning it in NEW_OPS.  OLD_OPS are
   the original operands, and if ANN is non-null, appropriate stmt flags are
   set in the stmt's annotation.  Note that some fields in old_ops may
   change to NULL, although none of the memory they originally pointed to
   will be destroyed.  It is appropriate to call free_stmt_operands() on
   the value returned in old_ops.

   The rationale for this: Certain optimizations wish to examine the
   difference between new_ops and old_ops after processing.  If a set of
   operands doesn't change, new_ops will simply assume the pointer in
   old_ops, and the old_ops pointer will be set to NULL, indicating no
   memory needs to be cleared.  Usage might appear something like:

       old_ops_copy = old_ops = stmt_ann (stmt)->operands;
       build_ssa_operands (stmt, NULL, &old_ops, &new_ops);
	  <* compare old_ops_copy and new_ops *>
       free_ssa_operands (old_ops);				*/
static void
build_ssa_operands (tree stmt, stmt_ann_t ann, stmt_operands_p old_ops,
		    stmt_operands_p new_ops)
{
  enum tree_code code;
  tree_ann_t saved_ann = stmt->common.ann;

  /* Replace stmt's annotation with the one passed in for the duration
     of the operand building process.  This allows "fake" stmts to be built
     and not be included in other data structures which can be built here.  */
  stmt->common.ann = (tree_ann_t) ann;

  /* Initially assume that the statement has no volatile operands, nor
     makes aliased loads or stores.  */
  if (ann)
    {
      ann->has_volatile_ops = false;
      ann->makes_aliased_stores = false;
      ann->makes_aliased_loads = false;
    }

  start_ssa_stmt_operands ();

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_RANGE_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == REALPART_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == IMAGPART_EXPR
	  /* Use a V_MAY_DEF if the RHS might throw, as the LHS won't be
	     modified in that case.  FIXME we should represent somehow
	     that it is killed on the fallthrough path.  */
	  || tree_could_throw_p (TREE_OPERAND (stmt, 1)))
	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_is_def);
      else
	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
			   opf_is_def | opf_kill_def);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

      /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
	 pointer (which may only happen for USE operands), we will abort in
	 append_use.  This default will handle statements like empty
	 statements, or CALL_EXPRs that may appear on the RHS of a statement
	 or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }

  finalize_ssa_stmt_operands (stmt, old_ops, new_ops);
  stmt->common.ann = saved_ann;
}
/* Free any operands vectors in OPS.  */

static void
free_ssa_operands (stmt_operands_p ops)
{
  if (ops->def_ops)
    free_defs (&(ops->def_ops));
  if (ops->use_ops)
    free_uses (&(ops->use_ops));
  if (ops->vuse_ops)
    free_vuses (&(ops->vuse_ops));
  if (ops->v_may_def_ops)
    free_v_may_defs (&(ops->v_may_def_ops));
  if (ops->v_must_def_ops)
    free_v_must_defs (&(ops->v_must_def_ops));
}
/* Get the operands of statement STMT.  Note that repeated calls to
   get_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to modify_stmt().  */

void
get_stmt_operands (tree stmt)
{
  stmt_ann_t ann;
  stmt_operands_t old_operands;

  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  /* Ignore error statements.  */
  if (TREE_CODE (stmt) == ERROR_MARK)
    return;

  ann = get_stmt_ann (stmt);

  /* If the statement has not been modified, the operands are still
     valid.  */
  if (!ann->modified)
    return;

  timevar_push (TV_TREE_OPS);

  old_operands = ann->operands;
  memset (&(ann->operands), 0, sizeof (stmt_operands_t));

  build_ssa_operands (stmt, ann, &old_operands, &(ann->operands));
  free_ssa_operands (&old_operands);

  /* Clear the modified bit for STMT.  Subsequent calls to
     get_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to modify_stmt().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
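/* Illustrative usage sketch (not code in this file): a pass that changes a
   statement in place is expected to mark it modified and then rebuild the
   cache:

       modify_stmt (stmt);
       get_stmt_operands (stmt);

   Until modify_stmt is called, repeated get_stmt_operands calls are no-ops,
   as described above.  */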
/* Recursively scan the expression pointed to by EXPR_P in statement STMT.
   FLAGS is one of the OPF_* constants modifying how to interpret the
   operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL || expr == error_mark_node)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* We could have the address of a component, array member,
	 etc which has interesting variable references.  */
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the stmt takes its address
	 will be of interest to some passes (e.g. alias resolution).  */
      add_stmt_operand (expr_p, s_ann, 0);

      /* If the address is invariant, there may be no interesting variable
	 references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* There should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find here
	 are ARRAY_REF indices which will always be real operands (GIMPLE
	 does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case CONST_DECL:
      /* If we found a variable, add it to DEFS or USES depending
	 on the operand flags.  */
      add_stmt_operand (expr_p, s_ann, flags);
      return;

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
	 representing the array.  The virtual variable for an ARRAY_REF
	 is the VAR_DECL for the array.  */

      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
	 according to the value of IS_DEF.  Recurse if the LHS of the
	 ARRAY_REF node is not a regular variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, s_ann, flags);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
      return;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Similarly to arrays, references to compound variables (complex
	 types and structures/unions) are globbed.

	 FIXME: This means that

	       a.x = 6;
	       a.y = 7;
	       foo (a.x, a.y);

	 will not be constant propagated because the two partial
	 definitions to 'a' will kill each other.  Note that SRA may be
	 able to fix this problem if 'a' can be scalarized.  */

      /* If the LHS of the compound reference is not a regular variable,
	 recurse to keep looking for more operands in the subexpression.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, s_ann, flags);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      if (code == COMPONENT_REF)
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case MODIFY_EXPR:
      {
	int subflags;
	tree op;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

	op = TREE_OPERAND (expr, 0);
	if (TREE_CODE (op) == WITH_SIZE_EXPR)
	  op = TREE_OPERAND (op, 0);
	if (TREE_CODE (op) == ARRAY_REF
	    || TREE_CODE (op) == ARRAY_RANGE_REF
	    || TREE_CODE (op) == COMPONENT_REF
	    || TREE_CODE (op) == REALPART_EXPR
	    || TREE_CODE (op) == IMAGPART_EXPR)
	  subflags = opf_is_def;
	else
	  subflags = opf_is_def | opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
	return;
      }

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */

	tree t;
	for (t = TREE_OPERAND (expr, 0); t; t = TREE_CHAIN (t))
	  get_expr_operands (stmt, &TREE_VALUE (t), opf_none);

	return;
      }

    case TRUTH_NOT_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    do_binary:
      {
	tree op0 = TREE_OPERAND (expr, 0);
	tree op1 = TREE_OPERAND (expr, 1);

	/* If it would be profitable to swap the operands, then do so to
	   canonicalize the statement, enabling better optimization.

	   By placing canonicalization of such expressions here we
	   transparently keep statements in canonical form, even
	   when the statement is modified.  */
	if (tree_swap_operands_p (op0, op1, false))
	  {
	    /* For relationals we need to swap the operands
	       and change the code.  */
	    if (code == LT_EXPR
		|| code == GT_EXPR
		|| code == LE_EXPR
		|| code == GE_EXPR)
	      {
		TREE_SET_CODE (expr, swap_tree_comparison (code));
		TREE_OPERAND (expr, 0) = op1;
		TREE_OPERAND (expr, 1) = op0;
	      }
	    /* For a commutative operator we can just swap the operands.  */
	    else if (commutative_tree_code (code))
	      {
		TREE_OPERAND (expr, 0) = op1;
		TREE_OPERAND (expr, 1) = op0;
	      }
	  }

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
	goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
	goto do_binary;
      if (class == tcc_constant || class == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  internal_error ("internal error");
#endif
  gcc_unreachable ();
}
/* Scan operands in the ASM_EXPR STMT.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
			       &allows_mem, &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }


  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, s_ann, opf_is_def);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, s_ann, opf_is_def);
	    }

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, s_ann, opf_is_def);
	    }

	break;
      }
}
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (REF_ORIGINAL (expr))
    {
      enum tree_code ocode = TREE_CODE (REF_ORIGINAL (expr));

      /* If we originally accessed part of a structure, we do it still.  */
      if (ocode == ARRAY_REF
	  || ocode == COMPONENT_REF
	  || ocode == REALPART_EXPR
	  || ocode == IMAGPART_EXPR)
	flags &= ~opf_kill_def;
    }

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its type memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);
	  if (v_ann->type_mem_tag)
	    add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
	}
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }

  /* Everything else *should* have been folded elsewhere, but users
     are smarter than we in finding ways to write invalid code.  We
     cannot just abort here.  If we were absolutely certain that we
     do handle all valid cases, then we could just do nothing here.
     That seems optimistic, so attempt to do something logical...  */
  else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
	   && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
    {
      /* Make sure we know the object is addressable.  */
      pptr = &TREE_OPERAND (ptr, 0);
      add_stmt_operand (pptr, s_ann, 0);

      /* Mark the object itself with a VUSE.  */
      pptr = &TREE_OPERAND (*pptr, 0);
      get_expr_operands (stmt, pptr, flags);
      return;
    }

  /* Ok, this isn't even is_gimple_min_invariant.  Something's broken.  */
  else
    gcc_unreachable ();

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none);
}
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  if (!bitmap_empty_p (call_clobbered_vars))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt);
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt);
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}
/* Add *VAR_P to the appropriate operand array for the statement with
   annotation S_ANN.  FLAGS is as in get_expr_operands.  If *VAR_P is a
   GIMPLE register, it will be added to the statement's real operands,
   otherwise it is added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  bool is_real_op;
  tree var, sym;
  var_ann_t v_ann;

  var = *var_p;
  STRIP_NOPS (var);

  /* If the operand is an ADDR_EXPR, add its operand to the list of
     variables that have had their address taken in this statement.  */
  if (TREE_CODE (var) == ADDR_EXPR)
    {
      note_addressable (TREE_OPERAND (var, 0), s_ann);
      return;
    }

  /* If the original variable is not a scalar, it will be added to the list
     of virtual operands.  In that case, use its base symbol as the virtual
     variable representing it.  */
  is_real_op = is_gimple_reg (var);
  if (!is_real_op && !DECL_P (var))
    var = get_virtual_var (var);

  /* If VAR is not a variable that we care to optimize, do nothing.  */
  if (var == NULL_TREE || !SSA_VAR_P (var))
    return;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Don't expose volatile variables to the optimizers.  */
  if (TREE_THIS_VOLATILE (sym))
    {
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    {
      varray_type aliases;

      /* The variable is not a GIMPLE register.  Add it (or its aliases) to
	 virtual operands, unless the caller has specifically requested
	 not to add virtual operands (used when adding operands inside an
	 ADDR_EXPR expression).  */
      if (flags & opf_no_vops)
	return;

      aliases = v_ann->may_aliases;

      if (aliases == NULL)
	{
	  /* The variable is not aliased or it is an alias tag.  */
	  if (flags & opf_is_def)
	    {
	      if (flags & opf_kill_def)
		{
		  /* Only regular variables may get a V_MUST_DEF
		     operand.  */
		  gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG);
		  /* V_MUST_DEF for non-aliased, non-GIMPLE register
		     variable definitions.  */
		  append_v_must_def (var);
		}
	      else
		{
		  /* Add a V_MAY_DEF for call-clobbered variables and
		     memory tags.  */
		  append_v_may_def (var);
		}
	    }
	  else
	    {
	      append_vuse (var);
	      if (s_ann && v_ann->is_alias_tag)
		s_ann->makes_aliased_loads = 1;
	    }
	}
      else
	{
	  size_t i;

	  /* The variable is aliased.  Add its aliases to the virtual
	     operands.  */
	  gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);

	  if (flags & opf_is_def)
	    {
	      /* If the variable is also an alias tag, add a virtual
		 operand for it, otherwise we will miss representing
		 references to the members of the variable's alias set.
		 This fixes the bug in gcc.c-torture/execute/20020503-1.c.  */
	      if (v_ann->is_alias_tag)
		append_v_may_def (var);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_v_may_def (VARRAY_TREE (aliases, i));

	      if (s_ann)
		s_ann->makes_aliased_stores = 1;
	    }
	  else
	    {
	      /* Similarly, append a virtual use for VAR itself, when
		 it is an alias tag.  */
	      if (v_ann->is_alias_tag)
		append_vuse (var);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_vuse (VARRAY_TREE (aliases, i));

	      if (s_ann)
		s_ann->makes_aliased_loads = 1;
	    }
	}
    }
}
/* Record that VAR had its address taken in the statement with annotations
   S_ANN.  */

static void
note_addressable (tree var, stmt_ann_t s_ann)
{
  if (!s_ann)
    return;

  var = get_base_address (var);
  if (var && SSA_VAR_P (var))
    {
      if (s_ann->addresses_taken == NULL)
	s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
      bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
    }
}
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt)
{
  unsigned i;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;

  /* Functions that are not const, pure, or noreturn may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_is_def);
      return;
    }

  /* If cache is valid, copy the elements into the build vectors.  */
  if (ssa_call_clobbered_cache_valid)
    {
      for (i = 0; i < VARRAY_ACTIVE_SIZE (clobbered_vuses); i++)
	{
	  t = VARRAY_TREE (clobbered_vuses, i);
	  gcc_assert (TREE_CODE (t) != SSA_NAME);
	  var_ann (t)->in_vuse_list = 1;
	  VARRAY_PUSH_TREE (build_vuses, t);
	}
      for (i = 0; i < VARRAY_ACTIVE_SIZE (clobbered_v_may_defs); i++)
	{
	  t = VARRAY_TREE (clobbered_v_may_defs, i);
	  gcc_assert (TREE_CODE (t) != SSA_NAME);
	  var_ann (t)->in_v_may_def_list = 1;
	  VARRAY_PUSH_TREE (build_v_may_defs, t);
	}
      if (s_ann)
	{
	  s_ann->makes_aliased_loads = clobbered_aliased_loads;
	  s_ann->makes_aliased_stores = clobbered_aliased_stores;
	}
      return;
    }

  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a V_MAY_DEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
    {
      tree var = referenced_var (i);
      if (TREE_READONLY (var)
	  && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
	add_stmt_operand (&var, &empty_ann, opf_none);
      else
	add_stmt_operand (&var, &empty_ann, opf_is_def);
    }

  clobbered_aliased_loads = empty_ann.makes_aliased_loads;
  clobbered_aliased_stores = empty_ann.makes_aliased_stores;

  /* Set the flags for a stmt's annotation.  */
  if (s_ann)
    {
      s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
      s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
    }

  /* Prepare empty cache vectors.  */
  if (clobbered_v_may_defs)
    {
      VARRAY_POP_ALL (clobbered_vuses);
      VARRAY_POP_ALL (clobbered_v_may_defs);
    }
  else
    {
      VARRAY_TREE_INIT (clobbered_v_may_defs, 10, "clobbered_v_may_defs");
      VARRAY_TREE_INIT (clobbered_vuses, 10, "clobbered_vuses");
    }

  /* Now fill the clobbered cache with the values that have been found.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
    VARRAY_PUSH_TREE (clobbered_vuses, VARRAY_TREE (build_vuses, i));
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
    VARRAY_PUSH_TREE (clobbered_v_may_defs, VARRAY_TREE (build_v_may_defs, i));

  ssa_call_clobbered_cache_valid = true;
}
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt)
{
  unsigned i;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_none);
      return;
    }

  /* If cache is valid, copy the elements into the build vector.  */
  if (ssa_ro_call_cache_valid)
    {
      for (i = 0; i < VARRAY_ACTIVE_SIZE (ro_call_vuses); i++)
	{
	  t = VARRAY_TREE (ro_call_vuses, i);
	  gcc_assert (TREE_CODE (t) != SSA_NAME);
	  var_ann (t)->in_vuse_list = 1;
	  VARRAY_PUSH_TREE (build_vuses, t);
	}
      if (s_ann)
	s_ann->makes_aliased_loads = ro_call_aliased_loads;
      return;
    }

  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
    {
      tree var = referenced_var (i);
      add_stmt_operand (&var, &empty_ann, opf_none);
    }

  ro_call_aliased_loads = empty_ann.makes_aliased_loads;
  if (s_ann)
    s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;

  /* Prepare empty cache vectors.  */
  if (ro_call_vuses)
    VARRAY_POP_ALL (ro_call_vuses);
  else
    VARRAY_TREE_INIT (ro_call_vuses, 10, "ro_call_vuses");

  /* Now fill the read-only call cache with the values that have been
     found.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
    VARRAY_PUSH_TREE (ro_call_vuses, VARRAY_TREE (build_vuses, i));

  ssa_ro_call_cache_valid = true;
}
/* Copies virtual operands from SRC to DST.  */

void
copy_virtual_operands (tree dst, tree src)
{
  unsigned i;
  vuse_optype vuses = STMT_VUSE_OPS (src);
  v_may_def_optype v_may_defs = STMT_V_MAY_DEF_OPS (src);
  v_must_def_optype v_must_defs = STMT_V_MUST_DEF_OPS (src);
  vuse_optype *vuses_new = &stmt_ann (dst)->operands.vuse_ops;
  v_may_def_optype *v_may_defs_new = &stmt_ann (dst)->operands.v_may_def_ops;
  v_must_def_optype *v_must_defs_new
    = &stmt_ann (dst)->operands.v_must_def_ops;

  if (vuses)
    {
      *vuses_new = allocate_vuse_optype (NUM_VUSES (vuses));
      for (i = 0; i < NUM_VUSES (vuses); i++)
	SET_VUSE_OP (*vuses_new, i, VUSE_OP (vuses, i));
    }

  if (v_may_defs)
    {
      *v_may_defs_new
	= allocate_v_may_def_optype (NUM_V_MAY_DEFS (v_may_defs));
      for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
	{
	  SET_V_MAY_DEF_OP (*v_may_defs_new, i, V_MAY_DEF_OP (v_may_defs, i));
	  SET_V_MAY_DEF_RESULT (*v_may_defs_new, i,
				V_MAY_DEF_RESULT (v_may_defs, i));
	}
    }

  if (v_must_defs)
    {
      *v_must_defs_new
	= allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs));
      for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
	{
	  SET_V_MUST_DEF_RESULT (*v_must_defs_new, i,
				 V_MUST_DEF_RESULT (v_must_defs, i));
	  SET_V_MUST_DEF_KILL (*v_must_defs_new, i,
			       V_MUST_DEF_KILL (v_must_defs, i));
	}
    }
}
/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this
   can be used to eliminate redundant loads.  OLD_OPS are the operands from
   the store stmt, and NEW_STMT is the new load which represents a load of
   the values stored.  */

void
create_ssa_artficial_load_stmt (stmt_operands_p old_ops, tree new_stmt)
{
  stmt_ann_t ann;
  tree op;
  stmt_operands_t tmp;
  unsigned j;

  memset (&tmp, 0, sizeof (stmt_operands_t));
  ann = get_stmt_ann (new_stmt);

  /* Free operands just in case it was an existing stmt.  */
  free_ssa_operands (&(ann->operands));

  build_ssa_operands (new_stmt, NULL, &tmp, &(ann->operands));
  free_vuses (&(ann->operands.vuse_ops));
  free_v_may_defs (&(ann->operands.v_may_def_ops));
  free_v_must_defs (&(ann->operands.v_must_def_ops));

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  for (j = 0; j < NUM_V_MAY_DEFS (old_ops->v_may_def_ops); j++)
    {
      op = V_MAY_DEF_RESULT (old_ops->v_may_def_ops, j);
      append_vuse (op);
    }

  for (j = 0; j < NUM_V_MUST_DEFS (old_ops->v_must_def_ops); j++)
    {
      op = V_MUST_DEF_RESULT (old_ops->v_must_def_ops, j);
      append_vuse (op);
    }

  /* Now set the vuses for this new stmt.  */
  ann->operands.vuse_ops = finalize_ssa_vuses (&(tmp.vuse_ops));
}
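/* Illustrative sketch only: given a store 'lhs = rhs' with operand cache
   OLD_OPS, the caller is expected to build a NEW_STMT resembling the
   reverse load 'rhs = lhs' and then invoke

       create_ssa_artficial_load_stmt (old_ops, new_stmt);

   so that NEW_STMT's VUSEs mirror the store's V_MAY_DEF/V_MUST_DEF
   results.  (The 'rhs = lhs' shape is an assumption about DOM's caller,
   not something this function enforces.)  */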
#include "gt-tree-ssa-operands.h"