/* Extraction metadata (gitweb header, kept for provenance):
   tree-ssa-operands.c (get_expr_operands): Use a switch.  Split out...
   [official-gcc.git] / gcc / tree-ssa-operands.c
   blob c032c9e46d030a3459c784d3f9a860b1fb6f1f67  */
1 /* SSA operands management for trees.
2 Copyright (C) 2003 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
/* Flags to describe operand properties in get_stmt_operands and helpers.  */

/* By default, operands are loaded.  */
#define opf_none 0

/* Operand is the target of an assignment expression or a
   call-clobbered variable  */
#define opf_is_def (1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def (1 << 2)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 1)

/* Scratch arrays shared by all statements: operands are accumulated here
   by the append_* routines and then copied into per-statement vectors by
   the finalize_ssa_* routines.  */

/* Array for building all the def operands.  */
static GTY (()) varray_type build_defs;

/* Array for building all the use operands.  */
static GTY (()) varray_type build_uses;

/* Array for building all the v_may_def operands.  */
static GTY (()) varray_type build_v_may_defs;

/* Array for building all the vuse operands.  */
static GTY (()) varray_type build_vuses;

/* Array for building all the v_must_def operands.  */
static GTY (()) varray_type build_v_must_defs;

/* Statement whose operands are currently being built.  Set by
   verify_start_operands and checked by the append_* routines to catch
   operands being added to the wrong statement.  */
#ifdef ENABLE_CHECKING
tree check_build_stmt;
#endif
/* Snapshot of a statement's previous virtual operand vectors.  Saved by
   get_stmt_operands before rescanning so that existing SSA version
   numbers can be re-used by the append_* routines.  */
typedef struct voperands_d
{
  v_may_def_optype v_may_def_ops;
  vuse_optype vuse_ops;
  v_must_def_optype v_must_def_ops;
} *voperands_t;
/* Forward declarations of file-local helpers.  */
static void note_addressable (tree, stmt_ann_t);
static void get_expr_operands (tree, tree *, int, voperands_t);
static void get_asm_expr_operands (tree, voperands_t);
static void get_indirect_ref_operands (tree, tree, int, voperands_t);
static void get_call_expr_operands (tree, tree, voperands_t);
static inline void append_def (tree *, tree);
static inline void append_use (tree *, tree);
static void append_v_may_def (tree, tree, voperands_t);
static void append_v_must_def (tree, tree, voperands_t);
static void add_call_clobber_ops (tree, voperands_t);
static void add_call_read_ops (tree, voperands_t);
static void add_stmt_operand (tree *, tree, int, voperands_t);
94 /* Return a vector of contiguous memory of a specified size. */
96 static inline def_optype
97 allocate_def_optype (unsigned num)
99 def_optype def_ops;
100 unsigned size;
101 size = sizeof (struct def_optype_d) + sizeof (tree *) * (num - 1);
102 def_ops = ggc_alloc (size);
103 def_ops->num_defs = num;
104 return def_ops;
107 static inline use_optype
108 allocate_use_optype (unsigned num)
110 use_optype use_ops;
111 unsigned size;
112 size = sizeof (struct use_optype_d) + sizeof (tree *) * (num - 1);
113 use_ops = ggc_alloc (size);
114 use_ops->num_uses = num;
115 return use_ops;
118 static inline v_may_def_optype
119 allocate_v_may_def_optype (unsigned num)
121 v_may_def_optype v_may_def_ops;
122 unsigned size;
123 size = sizeof (struct v_may_def_optype_d) + sizeof (tree) * ((num * 2) - 1);
124 v_may_def_ops = ggc_alloc (size);
125 v_may_def_ops->num_v_may_defs = num;
126 return v_may_def_ops;
129 static inline vuse_optype
130 allocate_vuse_optype (unsigned num)
132 vuse_optype vuse_ops;
133 unsigned size;
134 size = sizeof (struct vuse_optype_d) + sizeof (tree) * (num - 1);
135 vuse_ops = ggc_alloc (size);
136 vuse_ops->num_vuses = num;
137 return vuse_ops;
140 static inline v_must_def_optype
141 allocate_v_must_def_optype (unsigned num)
143 v_must_def_optype v_must_def_ops;
144 unsigned size;
145 size = sizeof (struct v_must_def_optype_d) + sizeof (tree *) * (num - 1);
146 v_must_def_ops = ggc_alloc (size);
147 v_must_def_ops->num_v_must_defs = num;
148 return v_must_def_ops;
151 static inline void
152 free_uses (use_optype *uses, bool dealloc)
154 if (*uses)
156 if (dealloc)
157 ggc_free (*uses);
158 *uses = NULL;
162 static inline void
163 free_defs (def_optype *defs, bool dealloc)
165 if (*defs)
167 if (dealloc)
168 ggc_free (*defs);
169 *defs = NULL;
173 static inline void
174 free_vuses (vuse_optype *vuses, bool dealloc)
176 if (*vuses)
178 if (dealloc)
179 ggc_free (*vuses);
180 *vuses = NULL;
184 static inline void
185 free_v_may_defs (v_may_def_optype *v_may_defs, bool dealloc)
187 if (*v_may_defs)
189 if (dealloc)
190 ggc_free (*v_may_defs);
191 *v_may_defs = NULL;
195 static inline void
196 free_v_must_defs (v_must_def_optype *v_must_defs, bool dealloc)
198 if (*v_must_defs)
200 if (dealloc)
201 ggc_free (*v_must_defs);
202 *v_must_defs = NULL;
206 void
207 remove_vuses (tree stmt)
209 stmt_ann_t ann;
211 ann = stmt_ann (stmt);
212 if (ann)
213 free_vuses (&(ann->vuse_ops), true);
216 void
217 remove_v_may_defs (tree stmt)
219 stmt_ann_t ann;
221 ann = stmt_ann (stmt);
222 if (ann)
223 free_v_may_defs (&(ann->v_may_def_ops), true);
226 void
227 remove_v_must_defs (tree stmt)
229 stmt_ann_t ann;
231 ann = stmt_ann (stmt);
232 if (ann)
233 free_v_must_defs (&(ann->v_must_def_ops), true);
/* Initialize the scratch arrays used by the append_*/finalize_ssa_*
   machinery.  Must be called before any statement operands are built.  */

void
init_ssa_operands (void)
{
  VARRAY_TREE_PTR_INIT (build_defs, 5, "build defs");
  VARRAY_TREE_PTR_INIT (build_uses, 10, "build uses");
  VARRAY_TREE_INIT (build_v_may_defs, 10, "build v_may_defs");
  VARRAY_TREE_INIT (build_vuses, 10, "build vuses");
  VARRAY_TREE_INIT (build_v_must_defs, 10, "build v_must_defs");
}
/* Tear-down counterpart of init_ssa_operands.  Currently a no-op: the
   build arrays are GTY-rooted, so the garbage collector reclaims them.  */

void
fini_ssa_operands (void)
{
}
/* Copy the DEF pointers accumulated in BUILD_DEFS into a freshly
   allocated vector attached to STMT's annotation, then empty
   BUILD_DEFS.  Does nothing if no defs were collected.  */

static void
finalize_ssa_defs (tree stmt)
{
  unsigned num, x;
  stmt_ann_t ann;
  def_optype def_ops;

  num = VARRAY_ACTIVE_SIZE (build_defs);
  if (num == 0)
    return;

#ifdef ENABLE_CHECKING
  /* There should only be a single real definition per assignment.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
    abort ();
#endif

  def_ops = allocate_def_optype (num);
  for (x = 0; x < num ; x++)
    def_ops->defs[x].def = VARRAY_TREE_PTR (build_defs, x);
  VARRAY_POP_ALL (build_defs);

  ann = stmt_ann (stmt);
  ann->def_ops = def_ops;
}
/* Copy the USE pointers accumulated in BUILD_USES into a freshly
   allocated vector attached to STMT's annotation, then empty
   BUILD_USES.  Does nothing if no uses were collected.  */

static void
finalize_ssa_uses (tree stmt)
{
  unsigned num, x;
  use_optype use_ops;
  stmt_ann_t ann;

  num = VARRAY_ACTIVE_SIZE (build_uses);
  if (num == 0)
    return;

#ifdef ENABLE_CHECKING
  {
    unsigned x;
    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to get_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      if (*(VARRAY_TREE_PTR (build_uses, x)) == stmt)
	abort ();
  }
#endif

  use_ops = allocate_use_optype (num);
  for (x = 0; x < num ; x++)
    use_ops->uses[x].use = VARRAY_TREE_PTR (build_uses, x);
  VARRAY_POP_ALL (build_uses);

  ann = stmt_ann (stmt);
  ann->use_ops = use_ops;
}
/* Copy the result/source pairs accumulated in BUILD_V_MAY_DEFS into a
   freshly allocated vector attached to STMT's annotation, then empty
   BUILD_V_MAY_DEFS.  The build array holds two entries (result, source)
   per V_MAY_DEF, so the vector is sized NUM / 2.  */

static void
finalize_ssa_v_may_defs (tree stmt)
{
  unsigned num, x;
  v_may_def_optype v_may_def_ops;
  stmt_ann_t ann;

  num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
  if (num == 0)
    return;

#ifdef ENABLE_CHECKING
  /* V_MAY_DEFs must be entered in pairs of result/uses.  */
  if (num % 2 != 0)
    abort();
#endif

  v_may_def_ops = allocate_v_may_def_optype (num / 2);
  for (x = 0; x < num; x++)
    v_may_def_ops->v_may_defs[x] = VARRAY_TREE (build_v_may_defs, x);
  VARRAY_CLEAR (build_v_may_defs);

  ann = stmt_ann (stmt);
  ann->v_may_def_ops = v_may_def_ops;
}
/* Copy the VUSEs accumulated in BUILD_VUSES into a freshly allocated
   vector attached to STMT's annotation, first pruning any VUSE that is
   already implied by a V_MAY_DEF on the same variable.  Requires that
   finalize_ssa_v_may_defs has already run for STMT.  */

static inline void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, x;
  stmt_ann_t ann;
  vuse_optype vuse_ops;
  v_may_def_optype v_may_defs;

#ifdef ENABLE_CHECKING
  if (VARRAY_ACTIVE_SIZE (build_v_may_defs) > 0)
    {
      fprintf (stderr, "Please finalize V_MAY_DEFs before finalize VUSES.\n");
      abort ();
    }
#endif

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  if (num == 0)
    return;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */
  ann = stmt_ann (stmt);
  v_may_defs = V_MAY_DEF_OPS (ann);
  if (NUM_V_MAY_DEFS (v_may_defs) > 0)
    {
      size_t i, j;
      for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
	{
	  bool found = false;
	  /* Compare the underlying variable of the VUSE against the
	     underlying variable of every V_MAY_DEF (stripping SSA_NAME
	     wrappers on both sides).  */
	  for (j = 0; j < NUM_V_MAY_DEFS (v_may_defs); j++)
	    {
	      tree vuse_var, v_may_def_var;
	      tree vuse = VARRAY_TREE (build_vuses, i);
	      tree v_may_def = V_MAY_DEF_OP (v_may_defs, j);

	      if (TREE_CODE (vuse) == SSA_NAME)
		vuse_var = SSA_NAME_VAR (vuse);
	      else
		vuse_var = vuse;

	      if (TREE_CODE (v_may_def) == SSA_NAME)
		v_may_def_var = SSA_NAME_VAR (v_may_def);
	      else
		v_may_def_var = v_may_def;

	      if (vuse_var == v_may_def_var)
		{
		  found = true;
		  break;
		}
	    }

	  /* If we found a useless VUSE operand, remove it from the
	     operand array by replacing it with the last active element
	     in the operand array (unless the useless VUSE was the
	     last operand, in which case we simply remove it.  */
	  if (found)
	    {
	      if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
		{
		  VARRAY_TREE (build_vuses, i)
		    = VARRAY_TREE (build_vuses,
				   VARRAY_ACTIVE_SIZE (build_vuses) - 1);
		}
	      VARRAY_POP (build_vuses);

	      /* We want to rescan the element at this index, unless
		 this was the last element, in which case the loop
		 terminates.  */
	      i--;
	    }
	}
    }

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  /* We could have reduced the size to zero now, however.  */
  if (num == 0)
    return;

  vuse_ops = allocate_vuse_optype (num);
  for (x = 0; x < num; x++)
    vuse_ops->vuses[x] = VARRAY_TREE (build_vuses, x);
  VARRAY_CLEAR (build_vuses);
  ann->vuse_ops = vuse_ops;
}
/* Copy the V_MUST_DEFs accumulated in BUILD_V_MUST_DEFS into a freshly
   allocated vector attached to STMT's annotation, then empty
   BUILD_V_MUST_DEFS.  Does nothing if none were collected.  */

static void
finalize_ssa_v_must_defs (tree stmt)
{
  unsigned num, x;
  stmt_ann_t ann;
  v_must_def_optype v_must_def_ops;

  num = VARRAY_ACTIVE_SIZE (build_v_must_defs);
  if (num == 0)
    return;

#ifdef ENABLE_CHECKING
  /* There should only be a single V_MUST_DEF per assignment.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
    abort ();
#endif

  v_must_def_ops = allocate_v_must_def_optype (num);
  for (x = 0; x < num ; x++)
    v_must_def_ops->v_must_defs[x] = VARRAY_TREE (build_v_must_defs, x);
  VARRAY_POP_ALL (build_v_must_defs);

  ann = stmt_ann (stmt);
  ann->v_must_def_ops = v_must_def_ops;
}
/* Finalize all operand vectors for STMT: transfer everything collected
   in the build arrays into STMT's annotation.  V_MAY_DEFs must be
   finalized before VUSEs (finalize_ssa_vuses prunes VUSEs implied by
   them), hence the fixed call order below.  */

extern void
finalize_ssa_stmt_operands (tree stmt)
{
#ifdef ENABLE_CHECKING
  if (check_build_stmt == NULL)
    abort();
#endif

  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);

#ifdef ENABLE_CHECKING
  check_build_stmt = NULL;
#endif
}
/* Checking-only sanity pass run when operand collection for STMT
   begins: all build arrays must be empty and no other statement may be
   mid-collection.  Records STMT in CHECK_BUILD_STMT so the append_*
   routines can verify they are adding operands to the right statement.
   Compiles to nothing without ENABLE_CHECKING.  */

extern void
verify_start_operands (tree stmt ATTRIBUTE_UNUSED)
{
#ifdef ENABLE_CHECKING
  if (VARRAY_ACTIVE_SIZE (build_defs) > 0
      || VARRAY_ACTIVE_SIZE (build_uses) > 0
      || VARRAY_ACTIVE_SIZE (build_vuses) > 0
      || VARRAY_ACTIVE_SIZE (build_v_may_defs) > 0
      || VARRAY_ACTIVE_SIZE (build_v_must_defs) > 0)
    abort ();
  if (check_build_stmt != NULL)
    abort();
  check_build_stmt = stmt;
#endif
}
/* Add DEF_P to the list of pointers to operands defined by STMT.  STMT
   is only consulted under ENABLE_CHECKING, to verify it matches the
   statement currently being scanned.  */

static inline void
append_def (tree *def_p, tree stmt ATTRIBUTE_UNUSED)
{
#ifdef ENABLE_CHECKING
  if (check_build_stmt != stmt)
    abort();
#endif
  VARRAY_PUSH_TREE_PTR (build_defs, def_p);
}
/* Add USE_P to the list of pointers to operands used by STMT.  STMT is
   only consulted under ENABLE_CHECKING, to verify it matches the
   statement currently being scanned.  */

static inline void
append_use (tree *use_p, tree stmt ATTRIBUTE_UNUSED)
{
#ifdef ENABLE_CHECKING
  if (check_build_stmt != stmt)
    abort();
#endif
  VARRAY_PUSH_TREE_PTR (build_uses, use_p);
}
522 /* Add a new virtual def for variable VAR to statement STMT. If PREV_VOPS
523 is not NULL, the existing entries are preserved and no new entries are
524 added here. This is done to preserve the SSA numbering of virtual
525 operands. */
527 static void
528 append_v_may_def (tree var, tree stmt, voperands_t prev_vops)
530 stmt_ann_t ann;
531 size_t i;
532 tree result, source;
534 #ifdef ENABLE_CHECKING
535 if (check_build_stmt != stmt)
536 abort();
537 #endif
539 ann = stmt_ann (stmt);
541 /* Don't allow duplicate entries. */
543 for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i += 2)
545 tree result = VARRAY_TREE (build_v_may_defs, i);
546 if (var == result
547 || (TREE_CODE (result) == SSA_NAME
548 && var == SSA_NAME_VAR (result)))
549 return;
552 /* If the statement already had virtual definitions, see if any of the
553 existing V_MAY_DEFs matches VAR. If so, re-use it, otherwise add a new
554 V_MAY_DEF for VAR. */
555 result = NULL_TREE;
556 source = NULL_TREE;
557 if (prev_vops)
558 for (i = 0; i < NUM_V_MAY_DEFS (prev_vops->v_may_def_ops); i++)
560 result = V_MAY_DEF_RESULT (prev_vops->v_may_def_ops, i);
561 if (result == var
562 || (TREE_CODE (result) == SSA_NAME
563 && SSA_NAME_VAR (result) == var))
565 source = V_MAY_DEF_OP (prev_vops->v_may_def_ops, i);
566 break;
570 /* If no previous V_MAY_DEF operand was found for VAR, create one now. */
571 if (source == NULL_TREE)
573 result = var;
574 source = var;
577 VARRAY_PUSH_TREE (build_v_may_defs, result);
578 VARRAY_PUSH_TREE (build_v_may_defs, source);
/* Add VAR to the list of virtual uses for STMT.  If PREV_VOPS
   is not NULL, the existing entries are preserved and no new entries are
   added here.  This is done to preserve the SSA numbering of virtual
   operands.  */

static void
append_vuse (tree var, tree stmt, voperands_t prev_vops)
{
  stmt_ann_t ann;
  size_t i;
  bool found;
  tree vuse;

#ifdef ENABLE_CHECKING
  if (check_build_stmt != stmt)
    abort();
#endif

  ann = stmt_ann (stmt);

  /* Don't allow duplicate entries.  A match on either the entry itself
     or its underlying variable (when the entry is an SSA_NAME) counts
     as a duplicate.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
    {
      tree vuse_var = VARRAY_TREE (build_vuses, i);
      if (var == vuse_var
	  || (TREE_CODE (vuse_var) == SSA_NAME
	      && var == SSA_NAME_VAR (vuse_var)))
	return;
    }

  /* If the statement already had virtual uses, see if any of the
     existing VUSEs matches VAR.  If so, re-use it, otherwise add a new
     VUSE for VAR.  */
  found = false;
  vuse = NULL_TREE;
  if (prev_vops)
    for (i = 0; i < NUM_VUSES (prev_vops->vuse_ops); i++)
      {
	vuse = VUSE_OP (prev_vops->vuse_ops, i);
	if (vuse == var
	    || (TREE_CODE (vuse) == SSA_NAME
		&& SSA_NAME_VAR (vuse) == var))
	  {
	    found = true;
	    break;
	  }
      }

  /* If VAR existed already in PREV_VOPS, re-use it.  */
  if (found)
    var = vuse;

  VARRAY_PUSH_TREE (build_vuses, var);
}
/* Add VAR to the list of virtual must definitions for STMT.  If PREV_VOPS
   is not NULL, the existing entries are preserved and no new entries are
   added here.  This is done to preserve the SSA numbering of virtual
   operands.  */

static void
append_v_must_def (tree var, tree stmt, voperands_t prev_vops)
{
  stmt_ann_t ann;
  size_t i;
  bool found;
  tree v_must_def;

#ifdef ENABLE_CHECKING
  if (check_build_stmt != stmt)
    abort();
#endif

  ann = stmt_ann (stmt);

  /* Don't allow duplicate entries.  A match on either the entry itself
     or its underlying variable (when the entry is an SSA_NAME) counts
     as a duplicate.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_must_defs); i++)
    {
      tree v_must_def_var = VARRAY_TREE (build_v_must_defs, i);
      if (var == v_must_def_var
	  || (TREE_CODE (v_must_def_var) == SSA_NAME
	      && var == SSA_NAME_VAR (v_must_def_var)))
	return;
    }

  /* If the statement already had virtual must defs, see if any of the
     existing V_MUST_DEFs matches VAR.  If so, re-use it, otherwise add a new
     V_MUST_DEF for VAR.  */
  found = false;
  v_must_def = NULL_TREE;
  if (prev_vops)
    for (i = 0; i < NUM_V_MUST_DEFS (prev_vops->v_must_def_ops); i++)
      {
	v_must_def = V_MUST_DEF_OP (prev_vops->v_must_def_ops, i);
	if (v_must_def == var
	    || (TREE_CODE (v_must_def) == SSA_NAME
		&& SSA_NAME_VAR (v_must_def) == var))
	  {
	    found = true;
	    break;
	  }
      }

  /* If VAR existed already in PREV_VOPS, re-use it.  */
  if (found)
    var = v_must_def;

  VARRAY_PUSH_TREE (build_v_must_defs, var);
}
/* External entry point which by-passes the previous vops mechanism
   (calls append_vuse with a NULL PREV_VOPS, so no SSA versions are
   re-used).  */

void
add_vuse (tree var, tree stmt)
{
  append_vuse (var, stmt, NULL);
}
/* Get the operands of statement STMT.  Note that repeated calls to
   get_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to modify_stmt().  */

void
get_stmt_operands (tree stmt)
{
  enum tree_code code;
  stmt_ann_t ann;
  struct voperands_d prev_vops;

#if defined ENABLE_CHECKING
  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  if (SSA_VAR_P (stmt))
    abort ();
#endif

  /* Ignore error statements.  */
  if (TREE_CODE (stmt) == ERROR_MARK)
    return;

  ann = get_stmt_ann (stmt);

  /* If the statement has not been modified, the operands are still valid.  */
  if (!ann->modified)
    return;

  timevar_push (TV_TREE_OPS);

  /* Initially assume that the statement has no volatile operands, nor
     makes aliased loads or stores.  */
  ann->has_volatile_ops = false;
  ann->makes_aliased_stores = false;
  ann->makes_aliased_loads = false;

  /* Remove any existing operands as they will be scanned again.  */
  free_defs (&(ann->def_ops), true);
  free_uses (&(ann->use_ops), true);

  /* Before removing existing virtual operands, save them in PREV_VOPS so
     that we can re-use their SSA versions.  */
  prev_vops.v_may_def_ops = V_MAY_DEF_OPS (ann);
  prev_vops.vuse_ops = VUSE_OPS (ann);
  prev_vops.v_must_def_ops = V_MUST_DEF_OPS (ann);

  /* Don't free the previous values to memory since we're still using them.  */
  free_v_may_defs (&(ann->v_may_def_ops), false);
  free_vuses (&(ann->vuse_ops), false);
  free_v_must_defs (&(ann->v_must_def_ops), false);

  start_ssa_stmt_operands (stmt);

  /* Dispatch on the statement code; the RHS of a MODIFY_EXPR is scanned
     as loads, the LHS as a (killing or may) definition.  */
  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none, &prev_vops);
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == REALPART_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == IMAGPART_EXPR
	  /* Use a V_MAY_DEF if the RHS might throw, as the LHS won't be
	     modified in that case.  FIXME we should represent somehow
	     that it is killed on the fallthrough path.  */
	  || tree_could_throw_p (TREE_OPERAND (stmt, 1)))
	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_is_def,
			   &prev_vops);
      else
	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
			   opf_is_def | opf_kill_def, &prev_vops);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none, &prev_vops);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none, &prev_vops);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt, &prev_vops);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none, &prev_vops);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none, &prev_vops);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none, &prev_vops);
      break;

    /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
	 pointer (which may only happen for USE operands), we will abort in
	 append_use.  This default will handle statements like empty statements,
	 CALL_EXPRs or VA_ARG_EXPRs that may appear on the RHS of a statement
	 or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none, &prev_vops);
      break;
    }

  finalize_ssa_stmt_operands (stmt);

  /* Now free the previous virtual ops to memory.  */
  free_v_may_defs (&(prev_vops.v_may_def_ops), true);
  free_vuses (&(prev_vops.vuse_ops), true);
  free_v_must_defs (&(prev_vops.v_must_def_ops), true);

  /* Clear the modified bit for STMT.  Subsequent calls to
     get_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to modify_stmt().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
/* Recursively scan the expression pointed by EXPR_P in statement STMT.
   FLAGS is one of the OPF_* constants modifying how to interpret the
   operands found.  PREV_VOPS is as in append_v_may_def and append_vuse.

   NOTE(review): the binary-operand case also canonicalizes the
   expression in place (swapping operands, possibly changing the
   comparison code), so this routine is not a pure scan.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags, voperands_t prev_vops)
{
  enum tree_code code;
  char class;
  tree expr = *expr_p;

  if (expr == NULL || expr == error_mark_node)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* We could have the address of a component, array member,
	 etc which has interesting variable references.  */
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that STMT takes its address will be
	 of interest to some passes (e.g. alias resolution).  */
      add_stmt_operand (expr_p, stmt, 0, NULL);

      /* If the address is constant (invariant is not sufficient), there will
	 be no interesting variable references inside.  */
      if (TREE_CONSTANT (expr))
	return;

      /* There should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find here
	 are ARRAY_REF indices which will always be real operands (GIMPLE
	 does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* If we found a variable, add it to DEFS or USES depending
	 on the operand flags.  */
      add_stmt_operand (expr_p, stmt, flags, prev_vops);
      return;

    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags, prev_vops);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
	 representing the array.  The virtual variable for an ARRAY_REF
	 is the VAR_DECL for the array.  */

      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
	 according to the value of IS_DEF.  Recurse if the LHS of the
	 ARRAY_REF node is not a regular variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, stmt, flags, prev_vops);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none, prev_vops);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none, prev_vops);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none, prev_vops);
      return;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Similarly to arrays, references to compound variables (complex
	 types and structures/unions) are globbed.

	 FIXME: This means that

			a.x = 6;
			a.y = 7;
			foo (a.x, a.y);

	 will not be constant propagated because the two partial
	 definitions to 'a' will kill each other.  Note that SRA may be
	 able to fix this problem if 'a' can be scalarized.  */

      /* If the LHS of the compound reference is not a regular variable,
	 recurse to keep looking for more operands in the subexpression.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, stmt, flags, prev_vops);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);

      if (code == COMPONENT_REF)
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none, prev_vops);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr, prev_vops);
      return;

    case MODIFY_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none, prev_vops);

      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ARRAY_REF
	  || TREE_CODE (TREE_OPERAND (expr, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (expr, 0)) == REALPART_EXPR
	  || TREE_CODE (TREE_OPERAND (expr, 0)) == IMAGPART_EXPR)
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def,
			   prev_vops);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			   opf_is_def | opf_kill_def, prev_vops);
      return;

    case VA_ARG_EXPR:
      /* Mark VA_ARG_EXPR nodes as making volatile references.  FIXME,
	 this is needed because we currently do not gimplify VA_ARG_EXPR
	 properly.  */
      stmt_ann (stmt)->has_volatile_ops = true;
      return;

    case TRUTH_NOT_EXPR:
    case BIT_FIELD_REF:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    do_binary:
      {
	tree op0 = TREE_OPERAND (expr, 0);
	tree op1 = TREE_OPERAND (expr, 1);

	/* If it would be profitable to swap the operands, then do so to
	   canonicalize the statement, enabling better optimization.

	   By placing canonicalization of such expressions here we
	   transparently keep statements in canonical form, even
	   when the statement is modified.  */
	if (tree_swap_operands_p (op0, op1, false))
	  {
	    /* For relationals we need to swap the operands
	       and change the code.  */
	    if (code == LT_EXPR
		|| code == GT_EXPR
		|| code == LE_EXPR
		|| code == GE_EXPR)
	      {
		TREE_SET_CODE (expr, swap_tree_comparison (code));
		TREE_OPERAND (expr, 0) = op1;
		TREE_OPERAND (expr, 1) = op0;
	      }
	    /* For a commutative operator we can just swap the operands.  */
	    else if (commutative_tree_code (code))
	      {
		TREE_OPERAND (expr, 0) = op1;
		TREE_OPERAND (expr, 1) = op0;
	      }
	  }

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags, prev_vops);
	return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
    case CONSTRUCTOR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == '1')
	goto do_unary;
      if (class == '2' || class == '<')
	goto do_binary;
      if (class == 'c' || class == 't')
	return;
    }

  /* If we get here, something has gone wrong.  */
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  abort ();
}
/* Scan operands in ASM_EXPR STMT.  PREV_VOPS is as in append_v_may_def and
   append_vuse.  Outputs are scanned as definitions, inputs as uses; a
   "memory" clobber makes the statement clobber all call-clobbered and
   addressable variables.  */

static void
get_asm_expr_operands (tree stmt, voperands_t prev_vops)
{
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  stmt_ann_t s_ann = stmt_ann (stmt);

  for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
			       &allows_mem, &allows_reg, &is_inout);

#if defined ENABLE_CHECKING
      /* This should have been split in gimplify_asm_expr.  */
      if (allows_reg && is_inout)
	abort ();
#endif

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def, prev_vops);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0, prev_vops);
    }

  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	size_t i;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, stmt, opf_is_def, prev_vops);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
	      {
		tree var = referenced_var (i);
		add_stmt_operand (&var, stmt, opf_is_def, prev_vops);
	      });

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i,
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, stmt, opf_is_def, prev_vops);
	    });

	/* If we don't have call-clobbered nor addressable vars and we
	   still have not computed aliasing information, just mark the
	   statement as having volatile operands.  If the alias pass
	   finds some, we will add them at that point.  */
	if (!aliases_computed_p)
	  stmt_ann (stmt)->has_volatile_ops = true;

	break;
      }
}
/* A subroutine of get_expr_operands to handle INDIRECT_REF.  Adds the
   appropriate memory-tag operand for the dereferenced pointer (or marks
   STMT volatile when aliasing information is unavailable), then scans
   the base pointer itself as a USE.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags,
			   voperands_t prev_vops)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;

  if (SSA_VAR_P (ptr))
    {
      if (!aliases_computed_p)
	{
	  /* If the pointer does not have a memory tag and aliases have not
	     been computed yet, mark the statement as having volatile
	     operands to prevent DOM from entering it in equivalence tables
	     and DCE from killing it.  */
	  stmt_ann (stmt)->has_volatile_ops = true;
	}
      else
	{
	  struct ptr_info_def *pi = NULL;

	  /* If we have computed aliasing already, check if PTR has
	     flow-sensitive points-to information.  */
	  if (TREE_CODE (ptr) == SSA_NAME
	      && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	      && pi->name_mem_tag)
	    {
	      /* PTR has its own memory tag.  Use it.  */
	      add_stmt_operand (&pi->name_mem_tag, stmt, flags, prev_vops);
	    }
	  else
	    {
	      /* If PTR is not an SSA_NAME or it doesn't have a name
		 tag, use its type memory tag.  */
	      var_ann_t ann;

	      /* If we are emitting debugging dumps, display a warning if
		 PTR is an SSA_NAME with no flow-sensitive alias
		 information.  That means that we may need to compute
		 aliasing again.  */
	      if (dump_file
		  && TREE_CODE (ptr) == SSA_NAME
		  && pi == NULL)
		{
		  fprintf (dump_file,
			   "NOTE: no flow-sensitive alias info for ");
		  print_generic_expr (dump_file, ptr, dump_flags);
		  fprintf (dump_file, " in ");
		  print_generic_stmt (dump_file, stmt, dump_flags);
		}

	      if (TREE_CODE (ptr) == SSA_NAME)
		ptr = SSA_NAME_VAR (ptr);
	      ann = var_ann (ptr);
	      add_stmt_operand (&ann->type_mem_tag, stmt, flags, prev_vops);
	    }
	}
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      stmt_ann (stmt)->has_volatile_ops = true;
      return;
    }

  /* Everything else *should* have been folded elsewhere, but users
     are smarter than we in finding ways to write invalid code.  We
     cannot just abort here.  If we were absolutely certain that we
     do handle all valid cases, then we could just do nothing here.
     That seems optimistic, so attempt to do something logical... */
  else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
	   && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
    {
      /* Make sure we know the object is addressable.  */
      pptr = &TREE_OPERAND (ptr, 0);
      add_stmt_operand (pptr, stmt, 0, NULL);

      /* Mark the object itself with a VUSE.  */
      pptr = &TREE_OPERAND (*pptr, 0);
      get_expr_operands (stmt, pptr, flags, prev_vops);
      return;
    }

  /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
  else
    abort ();

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none, prev_vops);
}
1226 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1228 static void
1229 get_call_expr_operands (tree stmt, tree expr, voperands_t prev_vops)
1231 tree op;
1232 int call_flags = call_expr_flags (expr);
1234 /* Find uses in the called function. */
1235 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none, prev_vops);
1237 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1238 get_expr_operands (stmt, &TREE_VALUE (op), opf_none, prev_vops);
1240 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none, prev_vops);
1242 if (bitmap_first_set_bit (call_clobbered_vars) >= 0)
1244 /* A 'pure' or a 'const' functions never call clobber anything.
1245 A 'noreturn' function might, but since we don't return anyway
1246 there is no point in recording that. */
1247 if (!(call_flags
1248 & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1249 add_call_clobber_ops (stmt, prev_vops);
1250 else if (!(call_flags & (ECF_CONST | ECF_NORETURN)))
1251 add_call_read_ops (stmt, prev_vops);
1253 else if (!aliases_computed_p)
1254 stmt_ann (stmt)->has_volatile_ops = true;
1258 /* Add *VAR_P to the appropriate operand array of STMT. FLAGS is as in
1259 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1260 the statement's real operands, otherwise it is added to virtual
1261 operands.
1263 PREV_VOPS is used when adding virtual operands to statements that
1264 already had them (See append_v_may_def and append_vuse). */
1266 static void
1267 add_stmt_operand (tree *var_p, tree stmt, int flags, voperands_t prev_vops)
1269 bool is_real_op;
1270 tree var, sym;
1271 stmt_ann_t s_ann;
1272 var_ann_t v_ann;
1274 var = *var_p;
1275 STRIP_NOPS (var);
1277 s_ann = stmt_ann (stmt);
1279 /* If the operand is an ADDR_EXPR, add its operand to the list of
1280 variables that have had their address taken in this statement. */
1281 if (TREE_CODE (var) == ADDR_EXPR)
1283 note_addressable (TREE_OPERAND (var, 0), s_ann);
1284 return;
1287 /* If the original variable is not a scalar, it will be added to the list
1288 of virtual operands. In that case, use its base symbol as the virtual
1289 variable representing it. */
1290 is_real_op = is_gimple_reg (var);
1291 if (!is_real_op && !DECL_P (var))
1292 var = get_virtual_var (var);
1294 /* If VAR is not a variable that we care to optimize, do nothing. */
1295 if (var == NULL_TREE || !SSA_VAR_P (var))
1296 return;
1298 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1299 v_ann = var_ann (sym);
1301 /* FIXME: We currently refuse to optimize variables that have hidden uses
1302 (variables used in VLA declarations, MD builtin calls and variables
1303 from the parent function in nested functions). This is because not
1304 all uses of these variables are exposed in the IL or the statements
1305 that reference them are not in GIMPLE form. If that's the case, mark
1306 the statement as having volatile operands and return. */
1307 if (v_ann->has_hidden_use)
1309 s_ann->has_volatile_ops = true;
1310 return;
1313 /* Don't expose volatile variables to the optimizers. */
1314 if (TREE_THIS_VOLATILE (sym))
1316 s_ann->has_volatile_ops = true;
1317 return;
1320 if (is_real_op)
1322 /* The variable is a GIMPLE register. Add it to real operands. */
1323 if (flags & opf_is_def)
1324 append_def (var_p, stmt);
1325 else
1326 append_use (var_p, stmt);
1328 else
1330 varray_type aliases;
1332 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1333 virtual operands, unless the caller has specifically requested
1334 not to add virtual operands (used when adding operands inside an
1335 ADDR_EXPR expression). */
1336 if (flags & opf_no_vops)
1337 return;
1339 aliases = v_ann->may_aliases;
1341 /* If alias information hasn't been computed yet, then
1342 addressable variables will not be an alias tag nor will they
1343 have aliases. In this case, mark the statement as having
1344 volatile operands. */
1345 if (!aliases_computed_p && may_be_aliased (var))
1346 s_ann->has_volatile_ops = true;
1348 if (aliases == NULL)
1350 /* The variable is not aliased or it is an alias tag. */
1351 if (flags & opf_is_def)
1353 if (v_ann->is_alias_tag)
1355 /* Alias tagged vars get regular V_MAY_DEF */
1356 s_ann->makes_aliased_stores = 1;
1357 append_v_may_def (var, stmt, prev_vops);
1359 else if ((flags & opf_kill_def)
1360 && v_ann->mem_tag_kind == NOT_A_TAG)
1361 /* V_MUST_DEF for non-aliased non-GIMPLE register
1362 variable definitions. Avoid memory tags. */
1363 append_v_must_def (var, stmt, prev_vops);
1364 else
1365 /* Call-clobbered variables & memory tags get
1366 V_MAY_DEF */
1367 append_v_may_def (var, stmt, prev_vops);
1369 else
1371 append_vuse (var, stmt, prev_vops);
1372 if (v_ann->is_alias_tag)
1373 s_ann->makes_aliased_loads = 1;
1376 else
1378 size_t i;
1380 /* The variable is aliased. Add its aliases to the virtual
1381 operands. */
1382 if (VARRAY_ACTIVE_SIZE (aliases) == 0)
1383 abort ();
1385 if (flags & opf_is_def)
1387 /* If the variable is also an alias tag, add a virtual
1388 operand for it, otherwise we will miss representing
1389 references to the members of the variable's alias set.
1390 This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
1391 if (v_ann->is_alias_tag)
1392 append_v_may_def (var, stmt, prev_vops);
1394 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1395 append_v_may_def (VARRAY_TREE (aliases, i), stmt, prev_vops);
1397 s_ann->makes_aliased_stores = 1;
1399 else
1401 if (v_ann->is_alias_tag)
1402 append_vuse (var, stmt, prev_vops);
1404 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1405 append_vuse (VARRAY_TREE (aliases, i), stmt, prev_vops);
1407 s_ann->makes_aliased_loads = 1;
1413 /* Record that VAR had its address taken in the statement with annotations
1414 S_ANN. */
1416 static void
1417 note_addressable (tree var, stmt_ann_t s_ann)
1419 var = get_base_address (var);
1420 if (var && SSA_VAR_P (var))
1422 if (s_ann->addresses_taken == NULL)
1423 s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
1424 bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
1429 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1430 clobbered variables in the function. */
1432 static void
1433 add_call_clobber_ops (tree stmt, voperands_t prev_vops)
1435 /* Functions that are not const, pure or never return may clobber
1436 call-clobbered variables. */
1437 stmt_ann (stmt)->makes_clobbering_call = true;
1439 /* If we had created .GLOBAL_VAR earlier, use it. Otherwise, add
1440 a V_MAY_DEF operand for every call clobbered variable. See
1441 compute_may_aliases for the heuristic used to decide whether
1442 to create .GLOBAL_VAR or not. */
1443 if (global_var)
1444 add_stmt_operand (&global_var, stmt, opf_is_def, prev_vops);
1445 else
1447 size_t i;
1449 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
1451 tree var = referenced_var (i);
1453 /* If VAR is read-only, don't add a V_MAY_DEF, just a
1454 VUSE operand. */
1455 if (!TREE_READONLY (var))
1456 add_stmt_operand (&var, stmt, opf_is_def, prev_vops);
1457 else
1458 add_stmt_operand (&var, stmt, opf_none, prev_vops);
1464 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1465 function. */
1467 static void
1468 add_call_read_ops (tree stmt, voperands_t prev_vops)
1470 /* Otherwise, if the function is not pure, it may reference memory. Add
1471 a VUSE for .GLOBAL_VAR if it has been created. Otherwise, add a VUSE
1472 for each call-clobbered variable. See add_referenced_var for the
1473 heuristic used to decide whether to create .GLOBAL_VAR. */
1474 if (global_var)
1475 add_stmt_operand (&global_var, stmt, opf_none, prev_vops);
1476 else
1478 size_t i;
1480 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
1482 tree var = referenced_var (i);
1483 add_stmt_operand (&var, stmt, opf_none, prev_vops);
1488 /* Copies virtual operands from SRC to DST. */
1490 void
1491 copy_virtual_operands (tree dst, tree src)
1493 vuse_optype vuses = STMT_VUSE_OPS (src);
1494 v_may_def_optype v_may_defs = STMT_V_MAY_DEF_OPS (src);
1495 v_must_def_optype v_must_defs = STMT_V_MUST_DEF_OPS (src);
1496 vuse_optype *vuses_new = &stmt_ann (dst)->vuse_ops;
1497 v_may_def_optype *v_may_defs_new = &stmt_ann (dst)->v_may_def_ops;
1498 v_must_def_optype *v_must_defs_new = &stmt_ann (dst)->v_must_def_ops;
1499 unsigned i;
1501 if (vuses)
1503 *vuses_new = allocate_vuse_optype (NUM_VUSES (vuses));
1504 for (i = 0; i < NUM_VUSES (vuses); i++)
1505 SET_VUSE_OP (*vuses_new, i, VUSE_OP (vuses, i));
1508 if (v_may_defs)
1510 *v_may_defs_new = allocate_v_may_def_optype (NUM_V_MAY_DEFS (v_may_defs));
1511 for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
1513 SET_V_MAY_DEF_OP (*v_may_defs_new, i, V_MAY_DEF_OP (v_may_defs, i));
1514 SET_V_MAY_DEF_RESULT (*v_may_defs_new, i,
1515 V_MAY_DEF_RESULT (v_may_defs, i));
1519 if (v_must_defs)
1521 *v_must_defs_new = allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs));
1522 for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
1523 SET_V_MUST_DEF_OP (*v_must_defs_new, i, V_MUST_DEF_OP (v_must_defs, i));
1527 #include "gt-tree-ssa-operands.h"