gcc/tree-ssa-operands.c
/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
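
/* Editorial sketch (not part of the original sources): for an aliased
   variable 'a', a statement such as

       a = a + 1;

   carries an operand cache of the form

       # a_3 = V_MAY_DEF <a_2>
       a = a + 1;

   where a_2 and a_3 exist only in the cache, while for a non-aliased
   GIMPLE register 'b', 'b = b + 1' yields a real DEF and a real USE
   that point directly into the statement tree.  */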
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none        0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def      (1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def    (1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops     (1 << 2)

/* Operand is a "non-specific" kill for call-clobbers and such.  This
   is used to distinguish "reset the world" events from explicit
   MODIFY_EXPRs.  */
#define opf_non_specific  (1 << 3)
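
/* Editorial note (not in the original sources): these flags are OR'd
   together while scanning.  For example, get_modify_expr_operands below
   scans an assignment's LHS with (opf_is_def | opf_kill_def), and
   get_indirect_ref_operands clears opf_kill_def again, because a store
   through a pointer can never be a killing definition.  */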
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Array for building all the V_MAY_DEF operands.  */
static VEC(tree,heap) *build_v_may_defs;

/* Array for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;

/* Array for building all the V_MUST_DEF operands.  */
static VEC(tree,heap) *build_v_must_defs;

/* These arrays are the cached operand vectors for call clobbered calls.  */
static bool ops_active = false;

static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
static unsigned operand_memory_index;

static void get_expr_operands (tree, tree *, int);

static def_optype_p free_defs = NULL;
static use_optype_p free_uses = NULL;
static vuse_optype_p free_vuses = NULL;
static maydef_optype_p free_maydefs = NULL;
static mustdef_optype_p free_mustdefs = NULL;
/* Allocates operand OP of given TYPE from the appropriate free list,
   or from newly allocated memory if the list is empty.  */

#define ALLOC_OPTYPE(OP, TYPE)                          \
  do                                                    \
    {                                                   \
      TYPE##_optype_p ret = free_##TYPE##s;             \
      if (ret)                                          \
        free_##TYPE##s = ret->next;                     \
      else                                              \
        ret = ssa_operand_alloc (sizeof (*ret));        \
      (OP) = ret;                                       \
    } while (0)
/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}
/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *) p);
  tree e2 = *((const tree *) q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}
/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static inline void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);

  if (num < 2)
    return;

  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
          > get_name_decl (VEC_index (tree, list, 1)))
        {
          /* Swap elements if in the wrong order.  */
          tree tmp = VEC_index (tree, list, 0);
          VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
          VEC_replace (tree, list, 1, tmp);
        }
      return;
    }

  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
         VEC_length (tree, list),
         sizeof (tree),
         operand_build_cmp);
}
/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  return ops_active;
}
/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobber_ops.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (V_MAY_DEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_ops.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  build_defs = VEC_alloc (tree, heap, 5);
  build_uses = VEC_alloc (tree, heap, 10);
  build_vuses = VEC_alloc (tree, heap, 25);
  build_v_may_defs = VEC_alloc (tree, heap, 25);
  build_v_must_defs = VEC_alloc (tree, heap, 25);

  gcc_assert (operand_memory == NULL);
  operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
  ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
}
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  VEC_free (tree, heap, build_defs);
  VEC_free (tree, heap, build_uses);
  VEC_free (tree, heap, build_v_must_defs);
  VEC_free (tree, heap, build_v_may_defs);
  VEC_free (tree, heap, build_vuses);
  free_defs = NULL;
  free_uses = NULL;
  free_vuses = NULL;
  free_maydefs = NULL;
  free_mustdefs = NULL;
  while ((ptr = operand_memory) != NULL)
    {
      operand_memory = operand_memory->next;
      ggc_free (ptr);
    }

  ops_active = false;

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars:%d\n",
               clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided:%d\n",
               clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided:%d\n",
               clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
               clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers:%d\n",
               clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
               clobber_stats.static_readonly_clobbers_avoided);
    }
}
/* Return memory for operands of SIZE chunks.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;
  if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
    {
      struct ssa_operand_memory_d *ptr;
      ptr = GGC_NEW (struct ssa_operand_memory_d);
      ptr->next = operand_memory;
      operand_memory = ptr;
      operand_memory_index = 0;
    }
  ptr = &(operand_memory->mem[operand_memory_index]);
  operand_memory_index += size;
  return ptr;
}
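
/* Editorial note (not in the original sources): this is a simple bump
   allocator over GC'd chunks.  A request that fits the current chunk just
   advances operand_memory_index; one that does not fit first pushes a
   fresh ssa_operand_memory_d onto the operand_memory chain.  Freed operand
   nodes never come back through here; they return to the per-type free
   lists via MOVE_HEAD_TO_FREELIST and are recycled by ALLOC_OPTYPE.  */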
/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* fold_stmt may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}
/* Appends ELT after TO, and moves the TO pointer to ELT.  */

#define APPEND_OP_AFTER(ELT, TO)        \
  do                                    \
    {                                   \
      (TO)->next = (ELT);               \
      (TO) = (ELT);                     \
    } while (0)

/* Appends head of list FROM after TO, and move both pointers
   to their successors.  */

#define MOVE_HEAD_AFTER(FROM, TO)       \
  do                                    \
    {                                   \
      APPEND_OP_AFTER (FROM, TO);       \
      (FROM) = (FROM)->next;            \
    } while (0)

/* Moves OP to appropriate freelist.  OP is set to its successor.  */

#define MOVE_HEAD_TO_FREELIST(OP, TYPE)         \
  do                                            \
    {                                           \
      TYPE##_optype_p next = (OP)->next;        \
      (OP)->next = free_##TYPE##s;              \
      free_##TYPE##s = (OP);                    \
      (OP) = next;                              \
    } while (0)

/* Initializes immediate use at USE_PTR to value VAL, and links it to the list
   of immediate uses.  STMT is the current statement.  */

#define INITIALIZE_USE(USE_PTR, VAL, STMT)              \
  do                                                    \
    {                                                   \
      (USE_PTR)->use = (VAL);                           \
      link_imm_use_stmt ((USE_PTR), *(VAL), (STMT));    \
    } while (0)
/* Adds OP to the list of defs after LAST, and moves
   LAST to the new element.  */

static inline void
add_def_op (tree *op, def_optype_p *last)
{
  def_optype_p new;

  ALLOC_OPTYPE (new, def);
  DEF_OP_PTR (new) = op;
  APPEND_OP_AFTER (new, *last);
}


/* Adds OP to the list of uses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_use_op (tree stmt, tree *op, use_optype_p *last)
{
  use_optype_p new;

  ALLOC_OPTYPE (new, use);
  INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
  APPEND_OP_AFTER (new, *last);
}


/* Adds OP to the list of vuses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_vuse_op (tree stmt, tree op, vuse_optype_p *last)
{
  vuse_optype_p new;

  ALLOC_OPTYPE (new, vuse);
  VUSE_OP (new) = op;
  INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt);
  APPEND_OP_AFTER (new, *last);
}


/* Adds OP to the list of maydefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_maydef_op (tree stmt, tree op, maydef_optype_p *last)
{
  maydef_optype_p new;

  ALLOC_OPTYPE (new, maydef);
  MAYDEF_RESULT (new) = op;
  MAYDEF_OP (new) = op;
  INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt);
  APPEND_OP_AFTER (new, *last);
}


/* Adds OP to the list of mustdefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_mustdef_op (tree stmt, tree op, mustdef_optype_p *last)
{
  mustdef_optype_p new;

  ALLOC_OPTYPE (new, mustdef);
  MUSTDEF_RESULT (new) = op;
  MUSTDEF_KILL (new) = op;
  INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt);
  APPEND_OP_AFTER (new, *last);
}
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Given that def operands list is not necessarily sorted, merging
           the operands this way does not make much sense.
        -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_def_ops (tree stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  tree *old_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = DEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_defs))
    {
      tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
      old_base = DEF_OP_PTR (old_ops);

      if (old_base == new_base)
        {
          /* If variables are the same, reuse this node.  */
          MOVE_HEAD_AFTER (old_ops, last);
          new_i++;
        }
      else if (old_base < new_base)
        {
          /* If old is less than new, old goes to the free list.  */
          MOVE_HEAD_TO_FREELIST (old_ops, def);
        }
      else
        {
          /* This is a new operand.  */
          add_def_op (new_base, &last);
          new_i++;
        }
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_defs); new_i++)
    add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = free_defs;
      free_defs = old_ops;
    }

  /* Now set the stmt's operands.  */
  DEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    def_optype_p ptr;
    unsigned x = 0;
    for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_defs));
  }
#endif
}
/* This routine will create stmt operands for STMT from the def build list.  */

static void
finalize_ssa_defs (tree stmt)
{
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);

  /* If there is an old list, often the new list is identical, or close, so
     find the elements at the beginning that are the same as the vector.  */
  finalize_ssa_def_ops (stmt);
  VEC_truncate (tree, build_defs, 0);
}
/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_use_ops (tree stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  new_list.next = NULL;
  last = &new_list;

  old_ops = USE_OPS (stmt);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = free_uses;
      free_uses = old_ops;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);

  last->next = NULL;

  /* Now set the stmt's operands.  */
  USE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_uses));
  }
#endif
}
/* Return a new use operand vector for STMT, comparing to OLD_OPS_P.  */

static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to update_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *) VEC_index (tree, build_uses, x)) != stmt);
  }
#endif
  finalize_ssa_use_ops (stmt);
  VEC_truncate (tree, build_uses, 0);
}
/* Takes elements from build_v_may_defs and turns them into maydef operands of
   STMT.  */

static inline void
finalize_ssa_v_may_def_ops (tree stmt)
{
  unsigned new_i;
  struct maydef_optype_d new_list;
  maydef_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = MAYDEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_v_may_defs))
    {
      act = VEC_index (tree, build_v_may_defs, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (MAYDEF_OP (old_ops));

      if (old_base == new_base)
        {
          /* If variables are the same, reuse this node.  */
          MOVE_HEAD_AFTER (old_ops, last);
          set_virtual_use_link (MAYDEF_OP_PTR (last), stmt);
          new_i++;
        }
      else if (old_base < new_base)
        {
          /* If old is less than new, old goes to the free list.  */
          delink_imm_use (MAYDEF_OP_PTR (old_ops));
          MOVE_HEAD_TO_FREELIST (old_ops, maydef);
        }
      else
        {
          /* This is a new operand.  */
          add_maydef_op (stmt, act, &last);
          new_i++;
        }
    }

  /* If there is anything remaining in the build_v_may_defs list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++)
    add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (MAYDEF_OP_PTR (ptr));
      old_ops->next = free_maydefs;
      free_maydefs = old_ops;
    }

  /* Now set the stmt's operands.  */
  MAYDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_v_may_defs));
  }
#endif
}
static void
finalize_ssa_v_may_defs (tree stmt)
{
  finalize_ssa_v_may_def_ops (stmt);
}
/* Clear the in_list bits and empty the build array for V_MAY_DEFs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = VEC_length (tree, build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
        {
          var_ann_t ann = var_ann (t);
          ann->in_v_may_def_list = 0;
        }
    }
  VEC_truncate (tree, build_v_may_defs, 0);
}
/* Takes elements from build_vuses and turns them into vuse operands of
   STMT.  */

static inline void
finalize_ssa_vuse_ops (tree stmt)
{
  unsigned new_i;
  struct vuse_optype_d new_list;
  vuse_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = VUSE_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_vuses))
    {
      act = VEC_index (tree, build_vuses, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (VUSE_OP (old_ops));

      if (old_base == new_base)
        {
          /* If variables are the same, reuse this node.  */
          MOVE_HEAD_AFTER (old_ops, last);
          set_virtual_use_link (VUSE_OP_PTR (last), stmt);
          new_i++;
        }
      else if (old_base < new_base)
        {
          /* If old is less than new, old goes to the free list.  */
          delink_imm_use (USE_OP_PTR (old_ops));
          MOVE_HEAD_TO_FREELIST (old_ops, vuse);
        }
      else
        {
          /* This is a new operand.  */
          add_vuse_op (stmt, act, &last);
          new_i++;
        }
    }

  /* If there is anything remaining in the build_vuses list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
    add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (VUSE_OP_PTR (ptr));
      old_ops->next = free_vuses;
      free_vuses = old_ops;
    }

  /* Now set the stmt's operands.  */
  VUSE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_vuses));
  }
#endif
}
/* Return a new VUSE operand vector, comparing to OLD_OPS_P.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_v_may_defs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is
     not needed because V_MAY_DEFs imply a VUSE of the variable.  For
     instance, suppose that variable 'a' is aliased:

              # VUSE <a_2>
              # a_3 = V_MAY_DEF <a_2>
              a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the
     V_MAY_DEF operation.  */
  num = VEC_length (tree, build_vuses);
  num_v_may_defs = VEC_length (tree, build_v_may_defs);

  if (num > 0 && num_v_may_defs > 0)
    {
      for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
        {
          tree vuse;
          vuse = VEC_index (tree, build_vuses, vuse_index);
          if (TREE_CODE (vuse) != SSA_NAME)
            {
              var_ann_t ann = var_ann (vuse);
              ann->in_vuse_list = 0;
              if (ann->in_v_may_def_list)
                {
                  VEC_ordered_remove (tree, build_vuses, vuse_index);
                  continue;
                }
            }
          vuse_index++;
        }
    }
  else
    {
      /* Clear out the in_list bits.  */
      for (vuse_index = 0;
           vuse_index < VEC_length (tree, build_vuses);
           vuse_index++)
        {
          tree t = VEC_index (tree, build_vuses, vuse_index);
          if (TREE_CODE (t) != SSA_NAME)
            {
              var_ann_t ann = var_ann (t);
              ann->in_vuse_list = 0;
            }
        }
    }

  finalize_ssa_vuse_ops (stmt);

  /* The V_MAY_DEF build vector wasn't cleaned up because we needed it.  */
  cleanup_v_may_defs ();

  /* Free the VUSEs build vector.  */
  VEC_truncate (tree, build_vuses, 0);
}
/* Takes elements from build_v_must_defs and turns them into mustdef operands of
   STMT.  */

static inline void
finalize_ssa_v_must_def_ops (tree stmt)
{
  unsigned new_i;
  struct mustdef_optype_d new_list;
  mustdef_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = MUSTDEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_v_must_defs))
    {
      act = VEC_index (tree, build_v_must_defs, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (MUSTDEF_KILL (old_ops));

      if (old_base == new_base)
        {
          /* If variables are the same, reuse this node.  */
          MOVE_HEAD_AFTER (old_ops, last);
          set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt);
          new_i++;
        }
      else if (old_base < new_base)
        {
          /* If old is less than new, old goes to the free list.  */
          delink_imm_use (MUSTDEF_KILL_PTR (old_ops));
          MOVE_HEAD_TO_FREELIST (old_ops, mustdef);
        }
      else
        {
          /* This is a new operand.  */
          add_mustdef_op (stmt, act, &last);
          new_i++;
        }
    }

  /* If there is anything remaining in the build_v_must_defs list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++)
    add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (MUSTDEF_KILL_PTR (ptr));
      old_ops->next = free_mustdefs;
      free_mustdefs = old_ops;
    }

  /* Now set the stmt's operands.  */
  MUSTDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_v_must_defs));
  }
#endif
}
static void
finalize_ssa_v_must_defs (tree stmt)
{
  /* In the presence of subvars, there may be more than one V_MUST_DEF
     per statement (one for each subvar).  It is a bit expensive to
     verify that all must-defs in a statement belong to subvars if
     there is more than one must-def, so we don't do it.  Suffice to
     say, if you reach here without having subvars, and have num >1,
     you have hit a bug.  */
  finalize_ssa_v_must_def_ops (stmt);
  VEC_truncate (tree, build_v_must_defs, 0);
}
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);
}
/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
  gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}


/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      /* Don't allow duplicate entries.  */
      if (ann->in_v_may_def_list)
        return;
      ann->in_v_may_def_list = 1;
    }

  VEC_safe_push (tree, heap, build_v_may_defs, (tree) var);
}


/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
        return;
      ann->in_vuse_list = 1;
    }

  VEC_safe_push (tree, heap, build_vuses, (tree) var);
}


/* Add VAR to the list of virtual must definitions for INFO.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
    if (var == VEC_index (tree, build_v_must_defs, i))
      return;

  VEC_safe_push (tree, heap, build_v_must_defs, (tree) var);
}
/* REF is a tree that contains the entire pointer dereference
   expression, if available, or NULL otherwise.  ALIAS is the variable
   we are asking if REF can access.  OFFSET and SIZE come from the
   memory access expression that generated this virtual operand.  */

static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
                           HOST_WIDE_INT size)
{
  bool offsetgtz = offset > 0;
  unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
  tree base = ref ? get_base_address (ref) : NULL;

  /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
     using a call-clobbered memory tag.  By definition, call-clobbered
     memory tags can always touch .GLOBAL_VAR.  */
  if (alias == global_var)
    return true;

  /* We cannot prune nonlocal aliases because they are not type
     specific.  */
  if (alias == nonlocal_all)
    return true;

  /* If ALIAS is an SFT, it can't be touched if the offset
     and size of the access is not overlapping with the SFT offset and
     size.  This is only true if we are accessing through a pointer
     to a type that is the same as SFT_PARENT_VAR.  Otherwise, we may
     be accessing through a pointer to some substruct of the
     structure, and if we try to prune there, we will have the wrong
     offset, and get the wrong answer.
     i.e., we can't prune without more work if we have something like

     struct gcc_target
     {
       struct asm_out
       {
         const char *byte_op;
         struct asm_int_op
         {
           const char *hi;
         } aligned_op;
       } asm_out;
     } targetm;

     foo = &targetm.asm_out.aligned_op;
     return foo->hi;

     SFT.1, which represents hi, will have SFT_OFFSET=32 because in
     terms of SFT_PARENT_VAR, that is where it is.
     However, the access through the foo pointer will be at offset 0.  */
  if (size != -1
      && TREE_CODE (alias) == STRUCT_FIELD_TAG
      && base
      && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
      && !overlap_subvar (offset, size, alias, NULL))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* Without strict aliasing, it is impossible for a component access
     through a pointer to touch a random variable, unless that
     variable *is* a structure or a pointer.

     That is, given p->c, and some random global variable b,
     there is no legal way that p->c could be an access to b.

     Without strict aliasing on, we consider it legal to do something
     like:

     struct foos { int l; };
     int foo;
     static struct foos *getfoo(void);
     int main (void)
     {
       struct foos *f = getfoo();
       f->l = 1;
       foo = 2;
       if (f->l == 1)
         abort();
       exit(0);
     }
     static struct foos *getfoo(void)
     { return (struct foos *)&foo; }

     (taken from 20000623-1.c)

     The docs also say/imply that access through union pointers
     is legal (but *not* if you take the address of the union member,
     i.e. the inverse), such that you can do

     typedef union {
       int d;
     } U;

     int rv;
     void breakme()
     {
       U *rv0;
       U *pretmp = (U*)&rv;
       rv0 = pretmp;
       rv0->d = 42;
     }

     To implement this, we just punt on accesses through union
     pointers entirely.  */
  else if (ref
           && flag_strict_aliasing
           && TREE_CODE (ref) != INDIRECT_REF
           && !MTAG_P (alias)
           && (TREE_CODE (base) != INDIRECT_REF
               || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
           && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
           && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
           && !POINTER_TYPE_P (TREE_TYPE (alias))
           /* When the struct has may_alias attached to it, we need not to
              return true.  */
           && get_alias_set (base))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* If the offset of the access is greater than the size of one of
     the possible aliases, it can't be touching that alias, because it
     would be past the end of the structure.  */
  else if (ref
           && flag_strict_aliasing
           && TREE_CODE (ref) != INDIRECT_REF
           && !MTAG_P (alias)
           && !POINTER_TYPE_P (TREE_TYPE (alias))
           && offsetgtz
           && DECL_SIZE (alias)
           && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
           && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  return true;
}
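
/* Editorial sketch (not in the original sources): as an example of the
   final pruning rule above, given

       char c;                        -- DECL_SIZE (c) is 8 bits
       ... = ((struct S *) p)->f;     -- access at bit offset 64

   'c' may be pruned from the alias list: an access at bit offset 64
   cannot land inside an 8-bit object, so it must be past the end of 'c'.
   Here 'struct S', 'p' and 'f' are hypothetical names for illustration.  */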
/* Add VAR to the virtual operands array.  FLAGS is as in
   get_expr_operands.  FULL_REF is a tree that contains the entire
   pointer dereference expression, if available, or NULL otherwise.
   OFFSET and SIZE come from the memory access expression that
   generated this virtual operand.  FOR_CLOBBER is true if this is
   adding a virtual operand for a call clobber.  */

static void
add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
                     tree full_ref, HOST_WIDE_INT offset,
                     HOST_WIDE_INT size, bool for_clobber)
{
  VEC(tree,gc) *aliases;
  tree sym;
  var_ann_t v_ann;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a V_MAY_DEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     check that this only happens on non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     modify_expr, then we can't suppress the V_MAY_DEF, lest we run
     into validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_non_specific) && unmodifiable_var_p (var))
    flags &= ~(opf_is_def | opf_kill_def);

  /* The variable is not a GIMPLE register.  Add it (or its aliases) to
     virtual operands, unless the caller has specifically requested
     not to add virtual operands (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  aliases = v_ann->may_aliases;
  if (aliases == NULL)
    {
      /* The variable is not aliased or it is an alias tag.  */
      if (flags & opf_is_def)
        {
          if (flags & opf_kill_def)
            {
              /* V_MUST_DEF for non-aliased, non-GIMPLE register
                 variable definitions.  */
              gcc_assert (!MTAG_P (var)
                          || TREE_CODE (var) == STRUCT_FIELD_TAG);
              append_v_must_def (var);
            }
          else
            {
              /* Add a V_MAY_DEF for call-clobbered variables and
                 memory tags.  */
              append_v_may_def (var);
            }
        }
      else
        append_vuse (var);
    }
  else
    {
      unsigned i;
      tree al;

      /* The variable is aliased.  Add its aliases to the virtual
         operands.  */
      gcc_assert (VEC_length (tree, aliases) != 0);

      if (flags & opf_is_def)
        {
          bool none_added = true;

          for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
            {
              if (!access_can_touch_variable (full_ref, al, offset, size))
                continue;

              none_added = false;
              append_v_may_def (al);
            }

          /* If the variable is also an alias tag, add a virtual
             operand for it, otherwise we will miss representing
             references to the members of the variable's alias set.
             This fixes the bug in gcc.c-torture/execute/20020503-1.c.

             It is also necessary to add bare defs on clobbers for
             SMT's, so that bare SMT uses caused by pruning all the
             aliases will link up properly with calls.  In order to
             keep the number of these bare defs we add down to the
             minimum necessary, we keep track of which SMT's were used
             alone in statement vdefs or VUSEs.  */
          if (v_ann->is_aliased
              || none_added
              || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
                  && for_clobber
                  && SMT_USED_ALONE (var)))
            {
              /* Every bare SMT def we add should have SMT_USED_ALONE
                 set on it, or else we will get the wrong answer on
                 clobbers.  Sadly, this assertion trips on code that
                 violates strict aliasing rules, because they *do* get
                 the clobbers wrong, since it is illegal code.  As a
                 result, we currently only enable it for aliasing
                 debugging.  Someone might wish to turn this code into
                 a nice strict-aliasing warning, since we *know* it
                 will get the wrong answer...  */
#ifdef ACCESS_DEBUGGING
              if (none_added
                  && !updating_used_alone && aliases_computed_p
                  && TREE_CODE (var) == SYMBOL_MEMORY_TAG)
                gcc_assert (SMT_USED_ALONE (var));
#endif
              append_v_may_def (var);
            }
        }
      else
        {
          bool none_added = true;
          for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
            {
              if (!access_can_touch_variable (full_ref, al, offset, size))
                continue;
              none_added = false;
              append_vuse (al);
            }

          /* Similarly, append a virtual use for VAR itself, when
             it is an alias tag.  */
          if (v_ann->is_aliased || none_added)
            append_vuse (var);
        }
    }
}
/* Add *VAR_P to the appropriate operand array for S_ANN.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  bool is_real_op;
  tree var, sym;
  var_ann_t v_ann;

  var = *var_p;
  gcc_assert (SSA_VAR_P (var));

  is_real_op = is_gimple_reg (var);

  /* If this is a real operand, the operand is either an SSA name or a
     decl.  Virtual operands may only be decls.  */
  gcc_assert (is_real_op || DECL_P (var));

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
        append_def (var_p);
      else
        append_use (var_p);
    }
  else
    add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
}
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
   that got us here.

   FLAGS is as in get_expr_operands.

   FULL_REF contains the full pointer dereference expression, if we
   have it, or NULL otherwise.

   OFFSET and SIZE are the location of the access inside the
   dereferenced pointer, if known.

   RECURSE_ON_BASE should be set to true if we want to continue
   calling get_expr_operands on the base pointer, and false if
   something else will do it for us.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags,
                           tree full_ref,
                           HOST_WIDE_INT offset, HOST_WIDE_INT size,
                           bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
          && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
          && pi->name_mem_tag)
        {
          /* PTR has its own memory tag.  Use it.  */
          add_virtual_operand (pi->name_mem_tag, s_ann, flags,
                               full_ref, offset, size, false);
        }
      else
        {
          /* If PTR is not an SSA_NAME or it doesn't have a name
             tag, use its symbol memory tag.  */
          var_ann_t v_ann;

          /* If we are emitting debugging dumps, display a warning if
             PTR is an SSA_NAME with no flow-sensitive alias
             information.  That means that we may need to compute
             aliasing again.  */
          if (dump_file
              && TREE_CODE (ptr) == SSA_NAME
              && pi == NULL)
            {
              fprintf (dump_file,
                       "NOTE: no flow-sensitive alias info for ");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, " in ");
              print_generic_stmt (dump_file, stmt, dump_flags);
            }

          if (TREE_CODE (ptr) == SSA_NAME)
            ptr = SSA_NAME_VAR (ptr);
          v_ann = var_ann (ptr);

          if (v_ann->symbol_mem_tag)
            add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
                                 full_ref, offset, size, false);
        }
    }
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      /* If a constant is used as a pointer, we can't generate a real
         operand for it but we mark the statement volatile to prevent
         optimizations from messing things up.  */
      if (s_ann)
        s_ann->has_volatile_ops = true;
      return;
    }
  else
    {
      /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
      gcc_unreachable ();
    }

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr, opf_none);
}
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag = TMR_TAG (expr), ref;
  HOST_WIDE_INT offset, size, maxsize;
  subvar_t svars, sv;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);

  /* MEM_REFs should never be killing.  */
  flags &= ~opf_kill_def;

  if (TMR_SYMBOL (expr))
    {
      stmt_ann_t ann = stmt_ann (stmt);
      add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
    }

  if (!tag)
    {
      /* Something weird, so ensure that we will be careful.  */
      stmt_ann (stmt)->has_volatile_ops = true;
      return;
    }

  if (DECL_P (tag))
    {
      get_expr_operands (stmt, &tag, flags);
      return;
    }

  ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
  gcc_assert (ref != NULL_TREE);
  svars = get_subvars_for_var (ref);
  for (sv = svars; sv; sv = sv->next)
    {
      bool exact;
      if (overlap_subvar (offset, maxsize, sv->var, &exact))
        {
          int subvar_flags = flags;
          if (!exact || size != maxsize)
            subvar_flags &= ~opf_kill_def;
          add_stmt_operand (&sv->var, s_ann, subvar_flags);
        }
    }
}
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b, not_written_b;

  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_is_def);
      return;
    }

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */
  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;

  /* Add a V_MAY_DEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var_lookup (u);
      unsigned int escape_mask = var_ann (var)->escape_mask;
      tree real_var = var;
      bool not_read;
      bool not_written;

      /* Not read and not written are computed on regular vars, not
         subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
        real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b,
                                            DECL_UID (real_var)) : false;
      not_written = not_written_b ? bitmap_bit_p (not_written_b,
                                                  DECL_UID (real_var)) : false;
      gcc_assert (!unmodifiable_var_p (var));

      clobber_stats.clobbered_vars++;

      /* See if this variable is really clobbered by this function.  */

      /* Trivial case: Things escaping only to pure/const are not
         clobbered by non-pure-const, and only read by pure/const.  */
      if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
        {
          tree call = get_call_expr_in (stmt);
          if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
            {
              add_stmt_operand (&var, s_ann, opf_none);
              clobber_stats.unescapable_clobbers_avoided++;
              continue;
            }
          else
            {
              clobber_stats.unescapable_clobbers_avoided++;
              continue;
            }
        }

      if (not_written)
        {
          clobber_stats.static_write_clobbers_avoided++;
          if (!not_read)
            add_stmt_operand (&var, s_ann, opf_none);
          else
            clobber_stats.static_read_clobbers_avoided++;
        }
      else
        add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
    }
}
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_none);
      return;
    }

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      tree real_var = var;
      bool not_read;

      clobber_stats.readonly_clobbers++;

      /* Not read and not written are computed on regular vars, not
         subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
        real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
                            : false;

      if (not_read)
        {
          clobber_stats.static_readonly_clobbers_avoided++;
          continue;
        }

      add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
    }
}
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or V_USE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
  if (aliases_computed_p
      && !bitmap_empty_p (call_clobbered_vars)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
         A 'noreturn' function might, but since we don't return anyway
         there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
          && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
        add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
        add_call_read_ops (stmt, get_callee_fndecl (expr));
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                               &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t) && s_ann)
            add_to_addressable_set (t, &s_ann->addresses_taken);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t) && s_ann)
            add_to_addressable_set (t, &s_ann->addresses_taken);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
        unsigned i;
        bitmap_iterator bi;

        /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
           decided to group them).  */
        if (global_var)
          add_stmt_operand (&global_var, s_ann, opf_is_def);
        else
          EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
            {
              tree var = referenced_var (i);
              add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
            }

        /* Now clobber all addressables.  */
        EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
          {
            tree var = referenced_var (i);

            /* Subvars are explicitly represented in this list, so
               we don't need the original to be added to the clobber
               ops, but the original *will* be in this list because
               we keep the addressability of the original
               variable up-to-date so we don't screw up the rest of
               the backend.  */
            if (var_can_have_subvars (var)
                && get_subvars_for_var (var) != NULL)
              continue;

            add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
          }

        break;
      }
}
/* Scan operands for the assignment expression EXPR in statement STMT.  */

static void
get_modify_expr_operands (tree stmt, tree expr)
{
  /* First get operands from the RHS.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

  /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
     registers.  If the LHS is a store to memory, we will either need
     a preserving definition (V_MAY_DEF) or a killing definition
     (V_MUST_DEF).

     Preserving definitions are those that modify a part of an
     aggregate object for which no subvars have been computed (or the
     reference does not correspond exactly to one of them).  Stores
     through a pointer are also represented with V_MAY_DEF operators.

     The determination of whether to use a preserving or a killing
     definition is done while scanning the LHS of the assignment.  By
     default, assume that we will emit a V_MUST_DEF.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def|opf_kill_def);
}
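
/* Editorial sketch (not in the original sources): the default
   opf_kill_def set above survives only for stores that provably cover
   the whole object.  Roughly:

       x = 1;       real DEF of x (x is a GIMPLE register)
       s.f = 1;     V_MUST_DEF of the subvar for s.f, when subvars cover
                    the store exactly
       *p = 1;      V_MAY_DEF; get_indirect_ref_operands strips
                    opf_kill_def unconditionally

   where x, s.f and p are hypothetical names for illustration.  */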
1837 /* Recursively scan the expression pointed to by EXPR_P in statement
1838 STMT. FLAGS is one of the OPF_* constants modifying how to
1839 interpret the operands found. */
1841 static void
1842 get_expr_operands (tree stmt, tree *expr_p, int flags)
1844 enum tree_code code;
1845 enum tree_code_class class;
1846 tree expr = *expr_p;
1847 stmt_ann_t s_ann = stmt_ann (stmt);
1849 if (expr == NULL)
1850 return;
1852 code = TREE_CODE (expr);
1853 class = TREE_CODE_CLASS (code);
1855 switch (code)
1857 case ADDR_EXPR:
1858 /* Taking the address of a variable does not represent a
1859 reference to it, but the fact that the statement takes its
1860 address will be of interest to some passes (e.g. alias
1861 resolution). */
1862 add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
1864 /* If the address is invariant, there may be no interesting
1865 variable references inside. */
1866 if (is_gimple_min_invariant (expr))
1867 return;
1869 /* Otherwise, there may be variables referenced inside but there
1870 should be no VUSEs created, since the referenced objects are
1871 not really accessed. The only operands that we should find
1872 here are ARRAY_REF indices which will always be real operands
1873 (GIMPLE does not allow non-registers as array indices). */
1874 flags |= opf_no_vops;
1875 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1876 return;
1878 case SSA_NAME:
1879 case STRUCT_FIELD_TAG:
1880 case SYMBOL_MEMORY_TAG:
1881 case NAME_MEMORY_TAG:
1882 add_stmt_operand (expr_p, s_ann, flags);
1883 return;
1885 case VAR_DECL:
1886 case PARM_DECL:
1887 case RESULT_DECL:
1889 subvar_t svars;
1891 /* Add the subvars for a variable, if it has subvars, to DEFS
1892 or USES. Otherwise, add the variable itself. Whether it
1893 goes to USES or DEFS depends on the operand flags. */
1894 if (var_can_have_subvars (expr)
1895 && (svars = get_subvars_for_var (expr)))
1897 subvar_t sv;
1898 for (sv = svars; sv; sv = sv->next)
1899 add_stmt_operand (&sv->var, s_ann, flags);
1901 else
1902 add_stmt_operand (expr_p, s_ann, flags);
1904 return;

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree ref;
        HOST_WIDE_INT offset, size, maxsize;
        bool none = true;

        /* This component reference becomes an access to all of the
           subvariables it can touch, if we can determine that, but
           *NOT* the real one.  If we can't determine which fields we
           could touch, the recursion will eventually get to a
           variable and add *all* of its subvars, or whatever is the
           minimum correct subset.  */
        ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
        if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
          {
            subvar_t sv;
            subvar_t svars = get_subvars_for_var (ref);

            for (sv = svars; sv; sv = sv->next)
              {
                bool exact;

                if (overlap_subvar (offset, maxsize, sv->var, &exact))
                  {
                    int subvar_flags = flags;
                    none = false;
                    if (!exact || size != maxsize)
                      subvar_flags &= ~opf_kill_def;
                    add_stmt_operand (&sv->var, s_ann, subvar_flags);
                  }
              }

            if (!none)
              flags |= opf_no_vops;
          }
        else if (TREE_CODE (ref) == INDIRECT_REF)
          {
            get_indirect_ref_operands (stmt, ref, flags, expr, offset,
                                       maxsize, false);
            flags |= opf_no_vops;
          }

        /* Even if we found subvars above, we still need to see the
           immediate uses of 'd' in 's.a[d]'; if 's.a' has a subvar,
           we would miss them otherwise.  */
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
                           flags & ~opf_kill_def);

        if (code == COMPONENT_REF)
          {
            if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
              s_ann->has_volatile_ops = true;
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
          }
        else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
          {
            get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
          }

        return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
         and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case MODIFY_EXPR:
      get_modify_expr_operands (stmt, expr);
      return;

    case CONSTRUCTOR:
      {
        /* General aggregate CONSTRUCTORs have been decomposed, but they
           are still in use as the COMPLEX_EXPR equivalent for vectors.  */
        constructor_elt *ce;
        unsigned HOST_WIDE_INT idx;

        for (idx = 0;
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
             idx++)
          get_expr_operands (stmt, &ce->value, opf_none);

        return;
      }

    case BIT_FIELD_REF:
      /* Stores using BIT_FIELD_REF are always preserving definitions.  */
      flags &= ~opf_kill_def;

      /* Fallthru  */

    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
        return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
    case CONST_DECL:
    case OMP_PARALLEL:
    case OMP_SECTIONS:
    case OMP_FOR:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_RETURN:
    case OMP_CONTINUE:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
        goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
        goto do_binary;
      if (class == tcc_constant || class == tcc_type)
        return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
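
/* As an illustration (not an exhaustive contract) of the dispatch
   above: for the GIMPLE assignment

       s.f = x

   where 's' has sub-variables, get_modify_expr_operands sends the
   LHS through the COMPONENT_REF case, which resolves 's.f' with
   get_ref_base_and_extent and adds each overlapping sub-variable as
   a (virtual) definition, while the RHS 'x' is scanned separately
   and becomes a real USE.  Had the store been '*p = x', the
   INDIRECT_REF case would defer to get_indirect_ref_operands, which
   adds a real USE of 'p' and virtual operands for the memory it
   points to.  */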

/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      get_modify_expr_operands (stmt, stmt);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      /* These nodes contain no variable references.  */
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the
         operand pointer (which may only happen for USE operands), we
         will fail in add_stmt_operand.  This default will handle
         statements like empty statements, or CALL_EXPRs that may
         appear on the RHS of a statement or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }
}

/* Create an operands cache for STMT.  */

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands and
     does not take the address of any symbols.  */
  if (ann)
    {
      ann->has_volatile_ops = false;
      if (ann->addresses_taken)
        ann->addresses_taken = NULL;
    }

  start_ssa_stmt_operands ();

  parse_ssa_operands (stmt);
  operand_build_sort_virtual (build_vuses);
  operand_build_sort_virtual (build_v_may_defs);
  operand_build_sort_virtual (build_v_must_defs);

  finalize_ssa_stmt_operands (stmt);
}
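
/* Once the cache is (re)built, a pass can walk it with the operand
   iterators from tree-ssa-operands.h.  A minimal sketch (hypothetical
   debugging code, not called anywhere in this file):

     ssa_op_iter iter;
     tree var;

     FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_USES)
       print_generic_expr (stderr, var, TDF_SLIM);

   This prints every real and virtual use that was just cached for
   STMT.  */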

/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->maydef_ops = NULL;
  ops->mustdef_ops = NULL;
  ops->vuse_ops = NULL;
}

/* Get the operands of statement STMT.  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  gcc_assert (ann->modified);

  timevar_push (TV_TREE_OPS);

  build_ssa_operands (stmt);

  /* Clear the modified bit for STMT.  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
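
/* Passes normally do not call update_stmt_operands directly.  A
   minimal sketch of the usual pattern, assuming the update_stmt
   wrapper from tree-flow-inline.h and a MODIFY_EXPR statement:

     TREE_OPERAND (stmt, 1) = new_rhs;
     update_stmt (stmt);

   update_stmt marks the statement modified and then calls back into
   this function, so the operand cache is rebuilt before its next
   use.  */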

/* Copies virtual operands from SRC to DEST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  tree t;
  ssa_op_iter iter, old_iter;
  use_operand_p use_p, u2;
  def_operand_p def_p, d2;

  build_ssa_operands (dest);

  /* Copy all the virtual fields.  */
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
    append_vuse (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
    append_v_may_def (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
    append_v_must_def (t);

  if (VEC_length (tree, build_vuses) == 0
      && VEC_length (tree, build_v_may_defs) == 0
      && VEC_length (tree, build_v_must_defs) == 0)
    return;

  /* Now commit the virtual operands to this stmt.  */
  finalize_ssa_v_must_defs (dest);
  finalize_ssa_v_may_defs (dest);
  finalize_ssa_vuses (dest);

  /* Finally, set the fields to the same values as the originals.  */
  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, t);
      t = op_iter_next_tree (&old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_maydef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));
}
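
/* A minimal usage sketch, with hypothetical names: a pass that
   duplicates a statement and needs the copy to reference the same
   memory might do

     tree new_stmt = unshare_expr (old_stmt);
     ...link new_stmt into the IL...
     copy_virtual_operands (new_stmt, old_stmt);

   so that the copy carries the same VUSE, V_MAY_DEF and V_MUST_DEF
   SSA names as the original.  */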

/* Specifically for use in DOM's expression analysis.  Given a store,
   we create an artificial stmt which looks like a load from the
   store; this can be used to eliminate redundant loads.  OLD_STMT is
   the store stmt, and NEW_STMT is the new load, which represents a
   load of the values stored.  */

void
create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
{
  stmt_ann_t ann;
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned x;

  ann = get_stmt_ann (new_stmt);

  /* Process the stmt looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (x = 0; x < VEC_length (tree, build_vuses); x++)
    {
      tree t = VEC_index (tree, build_vuses, x);
      if (TREE_CODE (t) != SSA_NAME)
        {
          var_ann_t ann = var_ann (t);
          ann->in_vuse_list = 0;
        }
    }

  for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
        {
          var_ann_t ann = var_ann (t);
          ann->in_v_may_def_list = 0;
        }
    }

  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_v_may_defs, 0);
  VEC_truncate (tree, build_v_must_defs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
                             (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
    append_vuse (op);

  /* Now build the operands for this new stmt.  */
  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}
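
/* A sketch of the intended use, with hypothetical names (the real
   caller lives in tree-ssa-dom.c): given a store  s.f = x_3,  DOM
   builds the exchanged statement  x_3 = s.f  and gives it VUSEs
   matching the store's virtual definitions:

     tree lhs = TREE_OPERAND (store_stmt, 0);
     tree rhs = TREE_OPERAND (store_stmt, 1);
     tree load = build2 (MODIFY_EXPR, TREE_TYPE (rhs), rhs, lhs);
     create_ssa_artficial_load_stmt (load, store_stmt);

   The artificial load can then be entered into an expression hash
   table, so a later load of s.f can be rewritten to reuse x_3.  */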

/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */

void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp0)
          {
            use0 = ptr;
            break;
          }

      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp1)
          {
            use1 = ptr;
            break;
          }

      /* If one of the uses has no operand entry, there isn't much we
         can do at this point; presumably we don't need to worry about
         it.  */
      if (use0 && use1)
        {
          tree *tmp = USE_OP_PTR (use1)->use;
          USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
          USE_OP_PTR (use0)->use = tmp;
        }
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
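
/* A minimal usage sketch, assuming STMT is a MODIFY_EXPR whose RHS
   is a commutative expression: canonicalizing the operand order
   while keeping the operand cache consistent might look like

     tree rhs = TREE_OPERAND (stmt, 1);
     if (tree_swap_operands_p (TREE_OPERAND (rhs, 0),
                               TREE_OPERAND (rhs, 1), false))
       swap_tree_operands (stmt,
                           &TREE_OPERAND (rhs, 0),
                           &TREE_OPERAND (rhs, 1));

   where tree_swap_operands_p (from fold-const.c) decides whether the
   swapped order is the canonical one.  */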

/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
   a single variable whose address has been taken or any other valid
   GIMPLE memory reference (structure reference, array, etc).  If the
   base address of REF is a decl that has sub-variables, also add all
   of its sub-variables.  */

void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
  tree var;
  subvar_t svars;

  gcc_assert (addresses_taken);

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*addresses_taken == NULL)
        *addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
          && (svars = get_subvars_for_var (var)))
        {
          subvar_t sv;
          for (sv = svars; sv; sv = sv->next)
            {
              bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
              TREE_ADDRESSABLE (sv->var) = 1;
            }
        }
      else
        {
          bitmap_set_bit (*addresses_taken, DECL_UID (var));
          TREE_ADDRESSABLE (var) = 1;
        }
    }
}
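
/* For example, for the GIMPLE statement  p = &s.f,  the ADDR_EXPR
   case in get_expr_operands passes the COMPONENT_REF s.f here;
   get_base_address strips the reference down to 's', so the whole
   structure, and every sub-variable it may have, is marked
   addressable, per the PR 21407 rule above.  */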

/* Scan the immediate_use list for VAR, making sure it is linked
   properly.  Return TRUE if there is a problem, and emit an error
   message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
         problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
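
/* For reference, the shape checked above (inferred from the checks
   themselves, not a separate specification): the immediate uses of
   an SSA name form a circular, doubly-linked list rooted at its
   IMM_USE_NODE, whose 'use' pointer is NULL so the root can be told
   apart from real entries:

     root <-> use1 <-> use2 <-> ... <-> useN <-> root

   Both directions are walked and must agree, and every real entry's
   'use' must point back at VAR.  */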

/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
        print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
      else
        print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }
  fprintf (file, "\n");
}

/* Dump the immediate uses of every SSA name to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}

/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump the def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

#include "gt-tree-ssa-operands.h"