/* SSA operands management for trees.
   Copyright (C) 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
/* Flags to describe operand properties in get_stmt_operands and helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def	(1 << 2)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 1)

/* Array for building all the def operands.  */
static GTY (()) varray_type build_defs;

/* Array for building all the use operands.  */
static GTY (()) varray_type build_uses;

/* Array for building all the v_may_def operands.  */
static GTY (()) varray_type build_v_may_defs;

/* Array for building all the vuse operands.  */
static GTY (()) varray_type build_vuses;

/* Array for building all the v_must_def operands.  */
static GTY (()) varray_type build_v_must_defs;

#ifdef ENABLE_CHECKING
tree check_build_stmt;
#endif

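/* Previous virtual operands of a statement.  get_stmt_operands saves the
   existing virtual operand vectors here before rescanning a statement, so
   that their SSA versions can be re-used (see append_v_may_def and
   append_vuse below).  */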
typedef struct voperands_d
{
  v_may_def_optype v_may_def_ops;
  vuse_optype vuse_ops;
  v_must_def_optype v_must_def_ops;
} *voperands_t;

static void note_addressable (tree, stmt_ann_t);
static void get_expr_operands (tree, tree *, int, voperands_t);
static void get_asm_expr_operands (tree, voperands_t);
static void get_indirect_ref_operands (tree, tree, int, voperands_t);
static void get_call_expr_operands (tree, tree, voperands_t);
static inline void append_def (tree *, tree);
static inline void append_use (tree *, tree);
static void append_v_may_def (tree, tree, voperands_t);
static void append_v_must_def (tree, tree, voperands_t);
static void add_call_clobber_ops (tree, voperands_t);
static void add_call_read_ops (tree, voperands_t);
static void add_stmt_operand (tree *, tree, int, voperands_t);

/* Return a vector of contiguous memory of a specified size.  */

static inline def_optype
allocate_def_optype (unsigned num)
{
  def_optype def_ops;
  unsigned size;
  size = sizeof (struct def_optype_d) + sizeof (tree *) * (num - 1);
  def_ops = ggc_alloc (size);
  def_ops->num_defs = num;
  return def_ops;
}

static inline use_optype
allocate_use_optype (unsigned num)
{
  use_optype use_ops;
  unsigned size;
  size = sizeof (struct use_optype_d) + sizeof (tree *) * (num - 1);
  use_ops = ggc_alloc (size);
  use_ops->num_uses = num;
  return use_ops;
}

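/* V_MAY_DEF operands are stored as <result, use> pairs, so the vector
   below is sized for NUM * 2 trees.  */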
static inline v_may_def_optype
allocate_v_may_def_optype (unsigned num)
{
  v_may_def_optype v_may_def_ops;
  unsigned size;
  size = sizeof (struct v_may_def_optype_d) + sizeof (tree) * ((num * 2) - 1);
  v_may_def_ops = ggc_alloc (size);
  v_may_def_ops->num_v_may_defs = num;
  return v_may_def_ops;
}

static inline vuse_optype
allocate_vuse_optype (unsigned num)
{
  vuse_optype vuse_ops;
  unsigned size;
  size = sizeof (struct vuse_optype_d) + sizeof (tree) * (num - 1);
  vuse_ops = ggc_alloc (size);
  vuse_ops->num_vuses = num;
  return vuse_ops;
}

static inline v_must_def_optype
allocate_v_must_def_optype (unsigned num)
{
  v_must_def_optype v_must_def_ops;
  unsigned size;
  size = sizeof (struct v_must_def_optype_d) + sizeof (tree *) * (num - 1);
  v_must_def_ops = ggc_alloc (size);
  v_must_def_ops->num_v_must_defs = num;
  return v_must_def_ops;
}

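/* Free the vector pointed to by USES, releasing its memory to the GC if
   DEALLOC is true; the pointer is cleared either way.  The four free_*
   functions that follow do the same for the other operand kinds.  */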
static inline void
free_uses (use_optype *uses, bool dealloc)
{
  if (*uses)
    {
      if (dealloc)
	ggc_free (*uses);
      *uses = NULL;
    }
}

static inline void
free_defs (def_optype *defs, bool dealloc)
{
  if (*defs)
    {
      if (dealloc)
	ggc_free (*defs);
      *defs = NULL;
    }
}

static inline void
free_vuses (vuse_optype *vuses, bool dealloc)
{
  if (*vuses)
    {
      if (dealloc)
	ggc_free (*vuses);
      *vuses = NULL;
    }
}

static inline void
free_v_may_defs (v_may_def_optype *v_may_defs, bool dealloc)
{
  if (*v_may_defs)
    {
      if (dealloc)
	ggc_free (*v_may_defs);
      *v_may_defs = NULL;
    }
}

static inline void
free_v_must_defs (v_must_def_optype *v_must_defs, bool dealloc)
{
  if (*v_must_defs)
    {
      if (dealloc)
	ggc_free (*v_must_defs);
      *v_must_defs = NULL;
    }
}

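/* The three functions below remove, respectively, the VUSE, V_MAY_DEF and
   V_MUST_DEF operand vectors from statement STMT.  */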
void
remove_vuses (tree stmt)
{
  stmt_ann_t ann;

  ann = stmt_ann (stmt);
  if (ann)
    free_vuses (&(ann->vuse_ops), true);
}

void
remove_v_may_defs (tree stmt)
{
  stmt_ann_t ann;

  ann = stmt_ann (stmt);
  if (ann)
    free_v_may_defs (&(ann->v_may_def_ops), true);
}

void
remove_v_must_defs (tree stmt)
{
  stmt_ann_t ann;

  ann = stmt_ann (stmt);
  if (ann)
    free_v_must_defs (&(ann->v_must_def_ops), true);
}

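/* Initialize the scratch arrays used to build the operand vectors.  */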
void
init_ssa_operands (void)
{
  VARRAY_TREE_PTR_INIT (build_defs, 5, "build defs");
  VARRAY_TREE_PTR_INIT (build_uses, 10, "build uses");
  VARRAY_TREE_INIT (build_v_may_defs, 10, "build v_may_defs");
  VARRAY_TREE_INIT (build_vuses, 10, "build vuses");
  VARRAY_TREE_INIT (build_v_must_defs, 10, "build v_must_defs");
}

void
fini_ssa_operands (void)
{
}

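/* Move the DEF operands accumulated in BUILD_DEFS into a permanent vector
   attached to STMT's annotation.  */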
static void
finalize_ssa_defs (tree stmt)
{
  unsigned num, x;
  stmt_ann_t ann;
  def_optype def_ops;

  num = VARRAY_ACTIVE_SIZE (build_defs);
  if (num == 0)
    return;

#ifdef ENABLE_CHECKING
  /* There should only be a single real definition per assignment.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
    abort ();
#endif

  def_ops = allocate_def_optype (num);
  for (x = 0; x < num ; x++)
    def_ops->defs[x].def = VARRAY_TREE_PTR (build_defs, x);
  VARRAY_POP_ALL (build_defs);

  ann = stmt_ann (stmt);
  ann->def_ops = def_ops;
}

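/* Likewise for the USE operands accumulated in BUILD_USES.  */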
static void
finalize_ssa_uses (tree stmt)
{
  unsigned num, x;
  use_optype use_ops;
  stmt_ann_t ann;

  num = VARRAY_ACTIVE_SIZE (build_uses);
  if (num == 0)
    return;

#ifdef ENABLE_CHECKING
  {
    unsigned x;
    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to get_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      if (*(VARRAY_TREE_PTR (build_uses, x)) == stmt)
	abort ();
  }
#endif

  use_ops = allocate_use_optype (num);
  for (x = 0; x < num ; x++)
    use_ops->uses[x].use = VARRAY_TREE_PTR (build_uses, x);
  VARRAY_POP_ALL (build_uses);

  ann = stmt_ann (stmt);
  ann->use_ops = use_ops;
}

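/* Likewise for the V_MAY_DEF operands accumulated in BUILD_V_MAY_DEFS.  */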
static void
finalize_ssa_v_may_defs (tree stmt)
{
  unsigned num, x;
  v_may_def_optype v_may_def_ops;
  stmt_ann_t ann;

  num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
  if (num == 0)
    return;

#ifdef ENABLE_CHECKING
  /* V_MAY_DEFs must be entered in pairs of result/uses.  */
  if (num % 2 != 0)
    abort ();
#endif

  v_may_def_ops = allocate_v_may_def_optype (num / 2);
  for (x = 0; x < num; x++)
    v_may_def_ops->v_may_defs[x] = VARRAY_TREE (build_v_may_defs, x);
  VARRAY_CLEAR (build_v_may_defs);

  ann = stmt_ann (stmt);
  ann->v_may_def_ops = v_may_def_ops;
}

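/* Likewise for the VUSE operands, with the additional twist that VUSEs
   made redundant by a V_MAY_DEF of the same variable are pruned first.  */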
static inline void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, x;
  stmt_ann_t ann;
  vuse_optype vuse_ops;
  v_may_def_optype v_may_defs;

#ifdef ENABLE_CHECKING
  if (VARRAY_ACTIVE_SIZE (build_v_may_defs) > 0)
    {
      fprintf (stderr, "Please finalize V_MAY_DEFs before finalizing VUSEs.\n");
      abort ();
    }
#endif

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  if (num == 0)
    return;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  ann = stmt_ann (stmt);
  v_may_defs = V_MAY_DEF_OPS (ann);
  if (NUM_V_MAY_DEFS (v_may_defs) > 0)
    {
      size_t i, j;
      for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
	{
	  bool found = false;
	  for (j = 0; j < NUM_V_MAY_DEFS (v_may_defs); j++)
	    {
	      tree vuse_var, v_may_def_var;
	      tree vuse = VARRAY_TREE (build_vuses, i);
	      tree v_may_def = V_MAY_DEF_OP (v_may_defs, j);

	      if (TREE_CODE (vuse) == SSA_NAME)
		vuse_var = SSA_NAME_VAR (vuse);
	      else
		vuse_var = vuse;

	      if (TREE_CODE (v_may_def) == SSA_NAME)
		v_may_def_var = SSA_NAME_VAR (v_may_def);
	      else
		v_may_def_var = v_may_def;

	      if (vuse_var == v_may_def_var)
		{
		  found = true;
		  break;
		}
	    }

	  /* If we found a useless VUSE operand, remove it from the
	     operand array by replacing it with the last active element
	     in the operand array (unless the useless VUSE was the
	     last operand, in which case we simply remove it).  */
	  if (found)
	    {
	      if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
		{
		  VARRAY_TREE (build_vuses, i)
		    = VARRAY_TREE (build_vuses,
				   VARRAY_ACTIVE_SIZE (build_vuses) - 1);
		}
	      VARRAY_POP (build_vuses);

	      /* We want to rescan the element at this index, unless
		 this was the last element, in which case the loop
		 terminates.  */
	      i--;
	    }
	}
    }

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  /* We could have reduced the size to zero now, however.  */
  if (num == 0)
    return;

  vuse_ops = allocate_vuse_optype (num);
  for (x = 0; x < num; x++)
    vuse_ops->vuses[x] = VARRAY_TREE (build_vuses, x);
  VARRAY_CLEAR (build_vuses);
  ann->vuse_ops = vuse_ops;
}

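/* Likewise for the V_MUST_DEF operands accumulated in
   BUILD_V_MUST_DEFS.  */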
static void
finalize_ssa_v_must_defs (tree stmt)
{
  unsigned num, x;
  stmt_ann_t ann;
  v_must_def_optype v_must_def_ops;

  num = VARRAY_ACTIVE_SIZE (build_v_must_defs);
  if (num == 0)
    return;

#ifdef ENABLE_CHECKING
  /* There should only be a single V_MUST_DEF per assignment.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
    abort ();
#endif

  v_must_def_ops = allocate_v_must_def_optype (num);
  for (x = 0; x < num ; x++)
    v_must_def_ops->v_must_defs[x] = VARRAY_TREE (build_v_must_defs, x);
  VARRAY_POP_ALL (build_v_must_defs);

  ann = stmt_ann (stmt);
  ann->v_must_def_ops = v_must_def_ops;
}

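/* Finalize all the operand vectors accumulated for STMT and attach them to
   its annotation.  */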
extern void
finalize_ssa_stmt_operands (tree stmt)
{
#ifdef ENABLE_CHECKING
  if (check_build_stmt == NULL)
    abort ();
#endif

  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);

#ifdef ENABLE_CHECKING
  check_build_stmt = NULL;
#endif
}

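/* Verify that the build arrays are empty before scanning a new statement,
   and remember STMT for the consistency checks done in the append_*
   routines.  */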
extern void
verify_start_operands (tree stmt ATTRIBUTE_UNUSED)
{
#ifdef ENABLE_CHECKING
  if (VARRAY_ACTIVE_SIZE (build_defs) > 0
      || VARRAY_ACTIVE_SIZE (build_uses) > 0
      || VARRAY_ACTIVE_SIZE (build_vuses) > 0
      || VARRAY_ACTIVE_SIZE (build_v_may_defs) > 0
      || VARRAY_ACTIVE_SIZE (build_v_must_defs) > 0)
    abort ();
  if (check_build_stmt != NULL)
    abort ();
  check_build_stmt = stmt;
#endif
}

/* Add DEF_P to the list of pointers to operands defined by STMT.  */

static inline void
append_def (tree *def_p, tree stmt ATTRIBUTE_UNUSED)
{
#ifdef ENABLE_CHECKING
  if (check_build_stmt != stmt)
    abort ();
#endif
  VARRAY_PUSH_TREE_PTR (build_defs, def_p);
}

/* Add USE_P to the list of pointers to operands used by STMT.  */

static inline void
append_use (tree *use_p, tree stmt ATTRIBUTE_UNUSED)
{
#ifdef ENABLE_CHECKING
  if (check_build_stmt != stmt)
    abort ();
#endif
  VARRAY_PUSH_TREE_PTR (build_uses, use_p);
}

/* Add a new virtual def for variable VAR to statement STMT.  If PREV_VOPS
   is not NULL, the existing entries are preserved and no new entries are
   added here.  This is done to preserve the SSA numbering of virtual
   operands.  */

static void
append_v_may_def (tree var, tree stmt, voperands_t prev_vops)
{
  stmt_ann_t ann;
  size_t i;
  tree result, source;

#ifdef ENABLE_CHECKING
  if (check_build_stmt != stmt)
    abort ();
#endif

  ann = stmt_ann (stmt);

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i += 2)
    {
      tree result = VARRAY_TREE (build_v_may_defs, i);
      if (var == result
	  || (TREE_CODE (result) == SSA_NAME
	      && var == SSA_NAME_VAR (result)))
	return;
    }

  /* If the statement already had virtual definitions, see if any of the
     existing V_MAY_DEFs matches VAR.  If so, re-use it, otherwise add a new
     V_MAY_DEF for VAR.  */
  result = NULL_TREE;
  source = NULL_TREE;
  if (prev_vops)
    for (i = 0; i < NUM_V_MAY_DEFS (prev_vops->v_may_def_ops); i++)
      {
	result = V_MAY_DEF_RESULT (prev_vops->v_may_def_ops, i);
	if (result == var
	    || (TREE_CODE (result) == SSA_NAME
		&& SSA_NAME_VAR (result) == var))
	  {
	    source = V_MAY_DEF_OP (prev_vops->v_may_def_ops, i);
	    break;
	  }
      }

  /* If no previous V_MAY_DEF operand was found for VAR, create one now.  */
  if (source == NULL_TREE)
    {
      result = var;
      source = var;
    }

  VARRAY_PUSH_TREE (build_v_may_defs, result);
  VARRAY_PUSH_TREE (build_v_may_defs, source);
}

582 /* Add VAR to the list of virtual uses for STMT. If PREV_VOPS
583 is not NULL, the existing entries are preserved and no new entries are
584 added here. This is done to preserve the SSA numbering of virtual
585 operands. */
587 static void
588 append_vuse (tree var, tree stmt, voperands_t prev_vops)
590 stmt_ann_t ann;
591 size_t i;
592 bool found;
593 tree vuse;
595 #ifdef ENABLE_CHECKING
596 if (check_build_stmt != stmt)
597 abort();
598 #endif
600 ann = stmt_ann (stmt);
602 /* Don't allow duplicate entries. */
603 for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
605 tree vuse_var = VARRAY_TREE (build_vuses, i);
606 if (var == vuse_var
607 || (TREE_CODE (vuse_var) == SSA_NAME
608 && var == SSA_NAME_VAR (vuse_var)))
609 return;
612 /* If the statement already had virtual uses, see if any of the
613 existing VUSEs matches VAR. If so, re-use it, otherwise add a new
614 VUSE for VAR. */
615 found = false;
616 vuse = NULL_TREE;
617 if (prev_vops)
618 for (i = 0; i < NUM_VUSES (prev_vops->vuse_ops); i++)
620 vuse = VUSE_OP (prev_vops->vuse_ops, i);
621 if (vuse == var
622 || (TREE_CODE (vuse) == SSA_NAME
623 && SSA_NAME_VAR (vuse) == var))
625 found = true;
626 break;
630 /* If VAR existed already in PREV_VOPS, re-use it. */
631 if (found)
632 var = vuse;
634 VARRAY_PUSH_TREE (build_vuses, var);
637 /* Add VAR to the list of virtual must definitions for STMT. If PREV_VOPS
638 is not NULL, the existing entries are preserved and no new entries are
639 added here. This is done to preserve the SSA numbering of virtual
640 operands. */
642 static void
643 append_v_must_def (tree var, tree stmt, voperands_t prev_vops)
645 stmt_ann_t ann;
646 size_t i;
647 bool found;
648 tree v_must_def;
650 #ifdef ENABLE_CHECKING
651 if (check_build_stmt != stmt)
652 abort();
653 #endif
655 ann = stmt_ann (stmt);
657 /* Don't allow duplicate entries. */
658 for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_must_defs); i++)
660 tree v_must_def_var = VARRAY_TREE (build_v_must_defs, i);
661 if (var == v_must_def_var
662 || (TREE_CODE (v_must_def_var) == SSA_NAME
663 && var == SSA_NAME_VAR (v_must_def_var)))
664 return;
667 /* If the statement already had virtual must defs, see if any of the
668 existing V_MUST_DEFs matches VAR. If so, re-use it, otherwise add a new
669 V_MUST_DEF for VAR. */
670 found = false;
671 v_must_def = NULL_TREE;
672 if (prev_vops)
673 for (i = 0; i < NUM_V_MUST_DEFS (prev_vops->v_must_def_ops); i++)
675 v_must_def = V_MUST_DEF_OP (prev_vops->v_must_def_ops, i);
676 if (v_must_def == var
677 || (TREE_CODE (v_must_def) == SSA_NAME
678 && SSA_NAME_VAR (v_must_def) == var))
680 found = true;
681 break;
685 /* If VAR existed already in PREV_VOPS, re-use it. */
686 if (found)
687 var = v_must_def;
689 VARRAY_PUSH_TREE (build_v_must_defs, var);
/* External entry point which by-passes the previous vops mechanism.  */

void
add_vuse (tree var, tree stmt)
{
  append_vuse (var, stmt, NULL);
}

/* Get the operands of statement STMT.  Note that repeated calls to
   get_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to modify_stmt().  */

void
get_stmt_operands (tree stmt)
{
  enum tree_code code;
  stmt_ann_t ann;
  struct voperands_d prev_vops;

#if defined ENABLE_CHECKING
  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  if (SSA_VAR_P (stmt))
    abort ();
#endif

  /* Ignore error statements.  */
  if (TREE_CODE (stmt) == ERROR_MARK)
    return;

  ann = get_stmt_ann (stmt);

  /* If the statement has not been modified, the operands are still valid.  */
  if (!ann->modified)
    return;

  timevar_push (TV_TREE_OPS);

  /* Initially assume that the statement has no volatile operands, nor
     makes aliased loads or stores.  */
  ann->has_volatile_ops = false;
  ann->makes_aliased_stores = false;
  ann->makes_aliased_loads = false;

  /* Remove any existing operands as they will be scanned again.  */
  free_defs (&(ann->def_ops), true);
  free_uses (&(ann->use_ops), true);

  /* Before removing existing virtual operands, save them in PREV_VOPS so
     that we can re-use their SSA versions.  */
  prev_vops.v_may_def_ops = V_MAY_DEF_OPS (ann);
  prev_vops.vuse_ops = VUSE_OPS (ann);
  prev_vops.v_must_def_ops = V_MUST_DEF_OPS (ann);

  /* Don't free the previous values to memory since we're still using them.  */
  free_v_may_defs (&(ann->v_may_def_ops), false);
  free_vuses (&(ann->vuse_ops), false);
  free_v_must_defs (&(ann->v_must_def_ops), false);

  start_ssa_stmt_operands (stmt);

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none, &prev_vops);
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == REALPART_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == IMAGPART_EXPR
	  /* Use a V_MAY_DEF if the RHS might throw, as the LHS won't be
	     modified in that case.  FIXME we should represent somehow
	     that it is killed on the fallthrough path.  */
	  || tree_could_throw_p (TREE_OPERAND (stmt, 1)))
	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_is_def,
			   &prev_vops);
      else
	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
			   opf_is_def | opf_kill_def, &prev_vops);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none, &prev_vops);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none, &prev_vops);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt, &prev_vops);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none, &prev_vops);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none, &prev_vops);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none, &prev_vops);
      break;

    /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
	 pointer (which may only happen for USE operands), we will abort in
	 append_use.  This default will handle statements like empty
	 statements, or CALL_EXPRs that may appear on the RHS of a statement
	 or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none, &prev_vops);
      break;
    }

  finalize_ssa_stmt_operands (stmt);

  /* Now free the previous virtual ops to memory.  */
  free_v_may_defs (&(prev_vops.v_may_def_ops), true);
  free_vuses (&(prev_vops.vuse_ops), true);
  free_v_must_defs (&(prev_vops.v_must_def_ops), true);

  /* Clear the modified bit for STMT.  Subsequent calls to
     get_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to modify_stmt().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}

/* Recursively scan the expression pointed by EXPR_P in statement STMT.
   FLAGS is one of the OPF_* constants modifying how to interpret the
   operands found.  PREV_VOPS is as in append_v_may_def and append_vuse.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags, voperands_t prev_vops)
{
  enum tree_code code;
  char class;
  tree expr = *expr_p;

  if (expr == NULL || expr == error_mark_node)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* We could have the address of a component, array member,
	 etc which has interesting variable references.  */
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that STMT takes its address will be
	 of interest to some passes (e.g. alias resolution).  */
      add_stmt_operand (expr_p, stmt, 0, NULL);

      /* If the address is constant (invariant is not sufficient), there will
	 be no interesting variable references inside.  */
      if (TREE_CONSTANT (expr))
	return;

      /* There should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find here
	 are ARRAY_REF indices which will always be real operands (GIMPLE
	 does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* If we found a variable, add it to DEFS or USES depending
	 on the operand flags.  */
      add_stmt_operand (expr_p, stmt, flags, prev_vops);
      return;

    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags, prev_vops);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
	 representing the array.  The virtual variable for an ARRAY_REF
	 is the VAR_DECL for the array.  */

      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
	 according to the value of IS_DEF.  Recurse if the LHS of the
	 ARRAY_REF node is not a regular variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, stmt, flags, prev_vops);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none, prev_vops);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none, prev_vops);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none, prev_vops);
      return;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Similarly to arrays, references to compound variables (complex
	 types and structures/unions) are globbed.

	 FIXME: This means that

	       a.x = 6;
	       a.y = 7;
	       foo (a.x, a.y);

	 will not be constant propagated because the two partial
	 definitions to 'a' will kill each other.  Note that SRA may be
	 able to fix this problem if 'a' can be scalarized.  */

      /* If the LHS of the compound reference is not a regular variable,
	 recurse to keep looking for more operands in the subexpression.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, stmt, flags, prev_vops);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);

      if (code == COMPONENT_REF)
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none, prev_vops);
      return;
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none, prev_vops);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr, prev_vops);
      return;

    case MODIFY_EXPR:
      {
	int subflags;
	tree op;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none, prev_vops);

	/* Look through a WITH_SIZE_EXPR on the LHS to find the real
	   destination.  */
	op = TREE_OPERAND (expr, 0);
	if (TREE_CODE (op) == WITH_SIZE_EXPR)
	  op = TREE_OPERAND (op, 0);
	if (TREE_CODE (op) == ARRAY_REF
	    || TREE_CODE (op) == COMPONENT_REF
	    || TREE_CODE (op) == REALPART_EXPR
	    || TREE_CODE (op) == IMAGPART_EXPR)
	  subflags = opf_is_def;
	else
	  subflags = opf_is_def | opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags, prev_vops);
	return;
      }
    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */

	tree t;
	for (t = TREE_OPERAND (expr, 0); t ; t = TREE_CHAIN (t))
	  get_expr_operands (stmt, &TREE_VALUE (t), opf_none, prev_vops);

	return;
      }

    case TRUTH_NOT_EXPR:
    case BIT_FIELD_REF:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    do_binary:
      {
	tree op0 = TREE_OPERAND (expr, 0);
	tree op1 = TREE_OPERAND (expr, 1);

	/* If it would be profitable to swap the operands, then do so to
	   canonicalize the statement, enabling better optimization.

	   By placing canonicalization of such expressions here we
	   transparently keep statements in canonical form, even
	   when the statement is modified.  */
	if (tree_swap_operands_p (op0, op1, false))
	  {
	    /* For relationals we need to swap the operands
	       and change the code.  */
	    if (code == LT_EXPR
		|| code == GT_EXPR
		|| code == LE_EXPR
		|| code == GE_EXPR)
	      {
		TREE_SET_CODE (expr, swap_tree_comparison (code));
		TREE_OPERAND (expr, 0) = op1;
		TREE_OPERAND (expr, 1) = op0;
	      }
	    /* For a commutative operator we can just swap the operands.  */
	    else if (commutative_tree_code (code))
	      {
		TREE_OPERAND (expr, 0) = op1;
		TREE_OPERAND (expr, 1) = op0;
	      }
	  }

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags, prev_vops);
	return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == '1')
	goto do_unary;
      if (class == '2' || class == '<')
	goto do_binary;
      if (class == 'c' || class == 't')
	return;
    }

  /* If we get here, something has gone wrong.  */
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  abort ();
}

/* Scan operands in ASM_EXPR STMT.  PREV_VOPS is as in append_v_may_def and
   append_vuse.  */

static void
get_asm_expr_operands (tree stmt, voperands_t prev_vops)
{
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  stmt_ann_t s_ann = stmt_ann (stmt);

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
			       &allows_mem, &allows_reg, &is_inout);

#if defined ENABLE_CHECKING
      /* This should have been split in gimplify_asm_expr.  */
      if (allows_reg && is_inout)
	abort ();
#endif

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def, prev_vops);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0, prev_vops);
    }

  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	size_t i;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, stmt, opf_is_def, prev_vops);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
	      {
		tree var = referenced_var (i);
		add_stmt_operand (&var, stmt, opf_is_def, prev_vops);
	      });

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i,
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, stmt, opf_is_def, prev_vops);
	    });

	/* If we don't have call-clobbered nor addressable vars and we
	   still have not computed aliasing information, just mark the
	   statement as having volatile operands.  If the alias pass
	   finds some, we will add them at that point.  */
	if (!aliases_computed_p)
	  stmt_ann (stmt)->has_volatile_ops = true;

	break;
      }
}

/* A subroutine of get_expr_operands to handle INDIRECT_REF.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags,
			   voperands_t prev_vops)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;

  if (SSA_VAR_P (ptr))
    {
      if (!aliases_computed_p)
	{
	  /* If the pointer does not have a memory tag and aliases have not
	     been computed yet, mark the statement as having volatile
	     operands to prevent DOM from entering it in equivalence tables
	     and DCE from killing it.  */
	  stmt_ann (stmt)->has_volatile_ops = true;
	}
      else
	{
	  struct ptr_info_def *pi = NULL;

	  /* If we have computed aliasing already, check if PTR has
	     flow-sensitive points-to information.  */
	  if (TREE_CODE (ptr) == SSA_NAME
	      && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	      && pi->name_mem_tag)
	    {
	      /* PTR has its own memory tag.  Use it.  */
	      add_stmt_operand (&pi->name_mem_tag, stmt, flags, prev_vops);
	    }
	  else
	    {
	      /* If PTR is not an SSA_NAME or it doesn't have a name
		 tag, use its type memory tag.  */
	      var_ann_t ann;

	      /* If we are emitting debugging dumps, display a warning if
		 PTR is an SSA_NAME with no flow-sensitive alias
		 information.  That means that we may need to compute
		 aliasing again.  */
	      if (dump_file
		  && TREE_CODE (ptr) == SSA_NAME
		  && pi == NULL)
		{
		  fprintf (dump_file,
			   "NOTE: no flow-sensitive alias info for ");
		  print_generic_expr (dump_file, ptr, dump_flags);
		  fprintf (dump_file, " in ");
		  print_generic_stmt (dump_file, stmt, dump_flags);
		}

	      if (TREE_CODE (ptr) == SSA_NAME)
		ptr = SSA_NAME_VAR (ptr);
	      ann = var_ann (ptr);
	      add_stmt_operand (&ann->type_mem_tag, stmt, flags, prev_vops);
	    }
	}
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      stmt_ann (stmt)->has_volatile_ops = true;
      return;
    }

  /* Everything else *should* have been folded elsewhere, but users
     are smarter than we in finding ways to write invalid code.  We
     cannot just abort here.  If we were absolutely certain that we
     do handle all valid cases, then we could just do nothing here.
     That seems optimistic, so attempt to do something logical... */
  else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
	   && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
    {
      /* Make sure we know the object is addressable.  */
      pptr = &TREE_OPERAND (ptr, 0);
      add_stmt_operand (pptr, stmt, 0, NULL);

      /* Mark the object itself with a VUSE.  */
      pptr = &TREE_OPERAND (*pptr, 0);
      get_expr_operands (stmt, pptr, flags, prev_vops);
      return;
    }

  /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
  else
    abort ();

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none, prev_vops);
}

/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr, voperands_t prev_vops)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none, prev_vops);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none, prev_vops);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none, prev_vops);

  if (bitmap_first_set_bit (call_clobbered_vars) >= 0)
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (!(call_flags
	    & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt, prev_vops);
      else if (!(call_flags & (ECF_CONST | ECF_NORETURN)))
	add_call_read_ops (stmt, prev_vops);
    }
  else if (!aliases_computed_p)
    stmt_ann (stmt)->has_volatile_ops = true;
}

/* Add *VAR_P to the appropriate operand array of STMT.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.

   PREV_VOPS is used when adding virtual operands to statements that
   already had them (See append_v_may_def and append_vuse).  */

static void
add_stmt_operand (tree *var_p, tree stmt, int flags, voperands_t prev_vops)
{
  bool is_real_op;
  tree var, sym;
  stmt_ann_t s_ann;
  var_ann_t v_ann;

  var = *var_p;
  STRIP_NOPS (var);

  s_ann = stmt_ann (stmt);

  /* If the operand is an ADDR_EXPR, add its operand to the list of
     variables that have had their address taken in this statement.  */
  if (TREE_CODE (var) == ADDR_EXPR)
    {
      note_addressable (TREE_OPERAND (var, 0), s_ann);
      return;
    }

  /* If the original variable is not a scalar, it will be added to the list
     of virtual operands.  In that case, use its base symbol as the virtual
     variable representing it.  */
  is_real_op = is_gimple_reg (var);
  if (!is_real_op && !DECL_P (var))
    var = get_virtual_var (var);

  /* If VAR is not a variable that we care to optimize, do nothing.  */
  if (var == NULL_TREE || !SSA_VAR_P (var))
    return;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Don't expose volatile variables to the optimizers.  */
  if (TREE_THIS_VOLATILE (sym))
    {
      s_ann->has_volatile_ops = true;
      return;
    }

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
	append_def (var_p, stmt);
      else
	append_use (var_p, stmt);
    }
  else
    {
      varray_type aliases;

      /* The variable is not a GIMPLE register.  Add it (or its aliases) to
	 virtual operands, unless the caller has specifically requested
	 not to add virtual operands (used when adding operands inside an
	 ADDR_EXPR expression).  */
      if (flags & opf_no_vops)
	return;

      aliases = v_ann->may_aliases;

      /* If alias information hasn't been computed yet, then
	 addressable variables will not be an alias tag nor will they
	 have aliases.  In this case, mark the statement as having
	 volatile operands.  */
      if (!aliases_computed_p && may_be_aliased (var))
	s_ann->has_volatile_ops = true;

      if (aliases == NULL)
	{
	  /* The variable is not aliased or it is an alias tag.  */
	  if (flags & opf_is_def)
	    {
	      if (v_ann->is_alias_tag)
		{
		  /* Alias tagged vars get a regular V_MAY_DEF.  */
		  s_ann->makes_aliased_stores = 1;
		  append_v_may_def (var, stmt, prev_vops);
		}
	      else if ((flags & opf_kill_def)
		       && v_ann->mem_tag_kind == NOT_A_TAG)
		/* V_MUST_DEF for non-aliased non-GIMPLE register
		   variable definitions.  Avoid memory tags.  */
		append_v_must_def (var, stmt, prev_vops);
	      else
		/* Call-clobbered variables & memory tags get a
		   V_MAY_DEF.  */
		append_v_may_def (var, stmt, prev_vops);
	    }
	  else
	    {
	      append_vuse (var, stmt, prev_vops);
	      if (v_ann->is_alias_tag)
		s_ann->makes_aliased_loads = 1;
	    }
	}
      else
	{
	  size_t i;

	  /* The variable is aliased.  Add its aliases to the virtual
	     operands.  */
	  if (VARRAY_ACTIVE_SIZE (aliases) == 0)
	    abort ();

	  if (flags & opf_is_def)
	    {
	      /* If the variable is also an alias tag, add a virtual
		 operand for it, otherwise we will miss representing
		 references to the members of the variable's alias set.
		 This fixes the bug in gcc.c-torture/execute/20020503-1.c.  */
	      if (v_ann->is_alias_tag)
		append_v_may_def (var, stmt, prev_vops);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_v_may_def (VARRAY_TREE (aliases, i), stmt, prev_vops);

	      s_ann->makes_aliased_stores = 1;
	    }
	  else
	    {
	      if (v_ann->is_alias_tag)
		append_vuse (var, stmt, prev_vops);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_vuse (VARRAY_TREE (aliases, i), stmt, prev_vops);

	      s_ann->makes_aliased_loads = 1;
	    }
	}
    }
}

/* Record that VAR had its address taken in the statement with annotations
   S_ANN.  */

static void
note_addressable (tree var, stmt_ann_t s_ann)
{
  var = get_base_address (var);
  if (var && SSA_VAR_P (var))
    {
      if (s_ann->addresses_taken == NULL)
	s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
      bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
    }
}

/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, voperands_t prev_vops)
{
  /* A function that is neither 'const', 'pure' nor 'noreturn' may clobber
     call-clobbered variables.  */
  stmt_ann (stmt)->makes_clobbering_call = true;

  /* If we had created .GLOBAL_VAR earlier, use it.  Otherwise, add
     a V_MAY_DEF operand for every call clobbered variable.  See
     compute_may_aliases for the heuristic used to decide whether
     to create .GLOBAL_VAR or not.  */
  if (global_var)
    add_stmt_operand (&global_var, stmt, opf_is_def, prev_vops);
  else
    {
      size_t i;

      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
	  {
	    tree var = referenced_var (i);

	    /* If VAR is read-only, don't add a V_MAY_DEF, just a
	       VUSE operand.  */
	    if (!TREE_READONLY (var))
	      add_stmt_operand (&var, stmt, opf_is_def, prev_vops);
	    else
	      add_stmt_operand (&var, stmt, opf_none, prev_vops);
	  });
    }
}

/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt, voperands_t prev_vops)
{
  /* Otherwise, if the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  Otherwise, add a VUSE
     for each call-clobbered variable.  See add_referenced_var for the
     heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    add_stmt_operand (&global_var, stmt, opf_none, prev_vops);
  else
    {
      size_t i;

      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
	  {
	    tree var = referenced_var (i);
	    add_stmt_operand (&var, stmt, opf_none, prev_vops);
	  });
    }
}

/* Copies virtual operands from SRC to DST.  */

void
copy_virtual_operands (tree dst, tree src)
{
  vuse_optype vuses = STMT_VUSE_OPS (src);
  v_may_def_optype v_may_defs = STMT_V_MAY_DEF_OPS (src);
  v_must_def_optype v_must_defs = STMT_V_MUST_DEF_OPS (src);
  vuse_optype *vuses_new = &stmt_ann (dst)->vuse_ops;
  v_may_def_optype *v_may_defs_new = &stmt_ann (dst)->v_may_def_ops;
  v_must_def_optype *v_must_defs_new = &stmt_ann (dst)->v_must_def_ops;
  unsigned i;

  if (vuses)
    {
      *vuses_new = allocate_vuse_optype (NUM_VUSES (vuses));
      for (i = 0; i < NUM_VUSES (vuses); i++)
	SET_VUSE_OP (*vuses_new, i, VUSE_OP (vuses, i));
    }

  if (v_may_defs)
    {
      *v_may_defs_new = allocate_v_may_def_optype (NUM_V_MAY_DEFS (v_may_defs));
      for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
	{
	  SET_V_MAY_DEF_OP (*v_may_defs_new, i, V_MAY_DEF_OP (v_may_defs, i));
	  SET_V_MAY_DEF_RESULT (*v_may_defs_new, i,
				V_MAY_DEF_RESULT (v_may_defs, i));
	}
    }

  if (v_must_defs)
    {
      *v_must_defs_new = allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs));
      for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
	SET_V_MUST_DEF_OP (*v_must_defs_new, i, V_MUST_DEF_OP (v_must_defs, i));
    }
}

#include "gt-tree-ssa-operands.h"