Privatize SSA variables into gimple_df.
[official-gcc.git] / gcc / tree-ssa-operands.c
1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
53 The operand tree is then parsed by the various get_* routines which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands. Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had SSA_NAME version of a
72 variable, and that same variable occurs in the same operands cache, then
73 the new cache vector will also get the same SSA_NAME.
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'. */
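/* Editorial illustration (not part of the original sources; the variable
   and tag names below are made up).  For a statement such as

	a = b + c;

   where 'a', 'b' and 'c' are GIMPLE registers, the cache holds one real
   DEF pointing at the slot for 'a' and real USEs pointing at the slots
   for 'b' and 'c', so rewriting them changes the statement itself.  For
   a store through an aliased pointer such as

	# MT_3 = V_MAY_DEF <MT_2>
	*p_1 = 5;

   the definition of the memory tag MT exists only in the operand cache;
   MT never appears in the statement tree.  */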
79 /* Flags to describe operand properties in helpers. */
81 /* By default, operands are loaded. */
82 #define opf_none 0
84 /* Operand is the target of an assignment expression or a
85 call-clobbered variable. */
86 #define opf_is_def (1 << 0)
88 /* Operand is the target of an assignment expression. */
89 #define opf_kill_def (1 << 1)
91 /* No virtual operands should be created in the expression. This is used
92 when traversing ADDR_EXPR nodes which have different semantics than
93 other expressions. Inside an ADDR_EXPR node, the only operands that we
94 need to consider are indices into arrays. For instance, &a.b[i] should
95 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
96 VUSE for 'b'. */
97 #define opf_no_vops (1 << 2)
99 /* Operand is a "non-specific" kill for call-clobbers and such. This
100 is used to distinguish "reset the world" events from explicit
101 MODIFY_EXPRs. */
102 #define opf_non_specific (1 << 3)
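/* Editorial note: these flags are combined by the scanning routines later
   in this file.  For instance, the LHS of a MODIFY_EXPR is scanned with
   (opf_is_def | opf_kill_def), get_indirect_ref_operands clears
   opf_kill_def because stores through pointers are never killing
   definitions, and operands inside an ADDR_EXPR are scanned with
   opf_no_vops added.  */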
104 /* Array for building all the def operands. */
105 static VEC(tree,heap) *build_defs;
107 /* Array for building all the use operands. */
108 static VEC(tree,heap) *build_uses;
110 /* Array for building all the V_MAY_DEF operands. */
111 static VEC(tree,heap) *build_v_may_defs;
113 /* Array for building all the VUSE operands. */
114 static VEC(tree,heap) *build_vuses;
116 /* Array for building all the V_MUST_DEF operands. */
117 static VEC(tree,heap) *build_v_must_defs;
119 /* True if the SSA operand cache is initialized and active. */
120 static bool ops_active = false;
122 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
123 static unsigned operand_memory_index;
125 static void get_expr_operands (tree, tree *, int);
127 static def_optype_p free_defs = NULL;
128 static use_optype_p free_uses = NULL;
129 static vuse_optype_p free_vuses = NULL;
130 static maydef_optype_p free_maydefs = NULL;
131 static mustdef_optype_p free_mustdefs = NULL;
133 /* Allocates operand OP of given TYPE from the appropriate free list,
134 or allocates a new one if the free list is empty. */
136 #define ALLOC_OPTYPE(OP, TYPE) \
137 do \
139 TYPE##_optype_p ret = free_##TYPE##s; \
140 if (ret) \
141 free_##TYPE##s = ret->next; \
142 else \
143 ret = ssa_operand_alloc (sizeof (*ret)); \
144 (OP) = ret; \
145 } while (0)
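/* Editorial note: a typical use is ALLOC_OPTYPE (new, def), as in
   add_def_op below, which pops a node off free_defs when one is
   available and otherwise carves a fresh def_optype_d out of the
   GC-allocated operand_memory chunks via ssa_operand_alloc.  */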
147 /* Return the DECL_UID of the base variable of T. */
149 static inline unsigned
150 get_name_decl (tree t)
152 if (TREE_CODE (t) != SSA_NAME)
153 return DECL_UID (t);
154 else
155 return DECL_UID (SSA_NAME_VAR (t));
159 /* Comparison function for qsort used in operand_build_sort_virtual. */
161 static int
162 operand_build_cmp (const void *p, const void *q)
164 tree e1 = *((const tree *)p);
165 tree e2 = *((const tree *)q);
166 unsigned int u1,u2;
168 u1 = get_name_decl (e1);
169 u2 = get_name_decl (e2);
171 /* We want to sort in ascending order. They can never be equal. */
172 #ifdef ENABLE_CHECKING
173 gcc_assert (u1 != u2);
174 #endif
175 return (u1 > u2 ? 1 : -1);
179 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
181 static inline void
182 operand_build_sort_virtual (VEC(tree,heap) *list)
184 int num = VEC_length (tree, list);
186 if (num < 2)
187 return;
189 if (num == 2)
191 if (get_name_decl (VEC_index (tree, list, 0))
192 > get_name_decl (VEC_index (tree, list, 1)))
194 /* Swap elements if in the wrong order. */
195 tree tmp = VEC_index (tree, list, 0);
196 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
197 VEC_replace (tree, list, 1, tmp);
199 return;
202 /* There are 3 or more elements, call qsort. */
203 qsort (VEC_address (tree, list),
204 VEC_length (tree, list),
205 sizeof (tree),
206 operand_build_cmp);
210 /* Return true if the SSA operands cache is active. */
212 bool
213 ssa_operands_active (void)
215 return ops_active;
219 /* Structure storing statistics on how many call clobbers we have, and
220 how many were avoided. */
222 static struct
224 /* Number of call-clobbered ops we attempt to add to calls in
225 add_call_clobber_ops. */
226 unsigned int clobbered_vars;
228 /* Number of write-clobbers (V_MAY_DEFs) avoided by using
229 not_written information. */
230 unsigned int static_write_clobbers_avoided;
232 /* Number of reads (VUSEs) avoided by using not_read information. */
233 unsigned int static_read_clobbers_avoided;
235 /* Number of write-clobbers avoided because the variable can't escape to
236 this call. */
237 unsigned int unescapable_clobbers_avoided;
239 /* Number of read-only uses we attempt to add to calls in
240 add_call_read_ops. */
241 unsigned int readonly_clobbers;
243 /* Number of read-only uses we avoid using not_read information. */
244 unsigned int static_readonly_clobbers_avoided;
245 } clobber_stats;
248 /* Initialize the operand cache routines. */
250 void
251 init_ssa_operands (void)
253 build_defs = VEC_alloc (tree, heap, 5);
254 build_uses = VEC_alloc (tree, heap, 10);
255 build_vuses = VEC_alloc (tree, heap, 25);
256 build_v_may_defs = VEC_alloc (tree, heap, 25);
257 build_v_must_defs = VEC_alloc (tree, heap, 25);
259 gcc_assert (operand_memory == NULL);
260 operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
261 ops_active = true;
262 memset (&clobber_stats, 0, sizeof (clobber_stats));
266 /* Dispose of anything required by the operand routines. */
268 void
269 fini_ssa_operands (void)
271 struct ssa_operand_memory_d *ptr;
272 VEC_free (tree, heap, build_defs);
273 VEC_free (tree, heap, build_uses);
274 VEC_free (tree, heap, build_v_must_defs);
275 VEC_free (tree, heap, build_v_may_defs);
276 VEC_free (tree, heap, build_vuses);
277 free_defs = NULL;
278 free_uses = NULL;
279 free_vuses = NULL;
280 free_maydefs = NULL;
281 free_mustdefs = NULL;
282 while ((ptr = operand_memory) != NULL)
284 operand_memory = operand_memory->next;
285 ggc_free (ptr);
288 ops_active = false;
290 if (dump_file && (dump_flags & TDF_STATS))
292 fprintf (dump_file, "Original clobbered vars:%d\n",
293 clobber_stats.clobbered_vars);
294 fprintf (dump_file, "Static write clobbers avoided:%d\n",
295 clobber_stats.static_write_clobbers_avoided);
296 fprintf (dump_file, "Static read clobbers avoided:%d\n",
297 clobber_stats.static_read_clobbers_avoided);
298 fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
299 clobber_stats.unescapable_clobbers_avoided);
300 fprintf (dump_file, "Original read-only clobbers:%d\n",
301 clobber_stats.readonly_clobbers);
302 fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
303 clobber_stats.static_readonly_clobbers_avoided);
308 /* Return memory for operands of SIZE chunks. */
310 static inline void *
311 ssa_operand_alloc (unsigned size)
313 char *ptr;
314 if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
316 struct ssa_operand_memory_d *ptr;
317 ptr = GGC_NEW (struct ssa_operand_memory_d);
318 ptr->next = operand_memory;
319 operand_memory = ptr;
320 operand_memory_index = 0;
322 ptr = &(operand_memory->mem[operand_memory_index]);
323 operand_memory_index += size;
324 return ptr;
329 /* This routine makes sure that PTR is in an immediate use list, and makes
330 sure the stmt pointer is set to the current stmt. */
332 static inline void
333 set_virtual_use_link (use_operand_p ptr, tree stmt)
335 /* fold_stmt may have changed the stmt pointers. */
336 if (ptr->stmt != stmt)
337 ptr->stmt = stmt;
339 /* If this use isn't in a list, add it to the correct list. */
340 if (!ptr->prev)
341 link_imm_use (ptr, *(ptr->use));
344 /* Appends ELT after TO, and moves the TO pointer to ELT. */
346 #define APPEND_OP_AFTER(ELT, TO) \
347 do \
349 (TO)->next = (ELT); \
350 (TO) = (ELT); \
351 } while (0)
353 /* Appends head of list FROM after TO, and move both pointers
354 to their successors. */
356 #define MOVE_HEAD_AFTER(FROM, TO) \
357 do \
359 APPEND_OP_AFTER (FROM, TO); \
360 (FROM) = (FROM)->next; \
361 } while (0)
363 /* Moves OP to appropriate freelist. OP is set to its successor. */
365 #define MOVE_HEAD_TO_FREELIST(OP, TYPE) \
366 do \
368 TYPE##_optype_p next = (OP)->next; \
369 (OP)->next = free_##TYPE##s; \
370 free_##TYPE##s = (OP); \
371 (OP) = next; \
372 } while (0)
374 /* Initializes immediate use at USE_PTR to value VAL, and links it to the list
375 of immediate uses. STMT is the current statement. */
377 #define INITIALIZE_USE(USE_PTR, VAL, STMT) \
378 do \
380 (USE_PTR)->use = (VAL); \
381 link_imm_use_stmt ((USE_PTR), *(VAL), (STMT)); \
382 } while (0)
384 /* Adds OP to the list of defs after LAST, and moves
385 LAST to the new element. */
387 static inline void
388 add_def_op (tree *op, def_optype_p *last)
390 def_optype_p new;
392 ALLOC_OPTYPE (new, def);
393 DEF_OP_PTR (new) = op;
394 APPEND_OP_AFTER (new, *last);
397 /* Adds OP to the list of uses of statement STMT after LAST, and moves
398 LAST to the new element. */
400 static inline void
401 add_use_op (tree stmt, tree *op, use_optype_p *last)
403 use_optype_p new;
405 ALLOC_OPTYPE (new, use);
406 INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
407 APPEND_OP_AFTER (new, *last);
410 /* Adds OP to the list of vuses of statement STMT after LAST, and moves
411 LAST to the new element. */
413 static inline void
414 add_vuse_op (tree stmt, tree op, vuse_optype_p *last)
416 vuse_optype_p new;
418 ALLOC_OPTYPE (new, vuse);
419 VUSE_OP (new) = op;
420 INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt);
421 APPEND_OP_AFTER (new, *last);
424 /* Adds OP to the list of maydefs of statement STMT after LAST, and moves
425 LAST to the new element. */
427 static inline void
428 add_maydef_op (tree stmt, tree op, maydef_optype_p *last)
430 maydef_optype_p new;
432 ALLOC_OPTYPE (new, maydef);
433 MAYDEF_RESULT (new) = op;
434 MAYDEF_OP (new) = op;
435 INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt);
436 APPEND_OP_AFTER (new, *last);
439 /* Adds OP to the list of mustdefs of statement STMT after LAST, and moves
440 LAST to the new element. */
442 static inline void
443 add_mustdef_op (tree stmt, tree op, mustdef_optype_p *last)
445 mustdef_optype_p new;
447 ALLOC_OPTYPE (new, mustdef);
448 MUSTDEF_RESULT (new) = op;
449 MUSTDEF_KILL (new) = op;
450 INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt);
451 APPEND_OP_AFTER (new, *last);
454 /* Takes elements from build_defs and turns them into def operands of STMT.
455 TODO -- Given that def operands list is not necessarily sorted, merging
456 the operands this way does not make much sense.
457 -- Make build_defs VEC of tree *. */
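/* Editorial note: real DEFs are matched by the address of the operand
   slot itself (a 'tree *' into the statement), so an old node is only
   reused when the new statement still writes through the very same
   slot.  The virtual operand finalizers below instead compare the
   DECL_UID of the base variable; their merge loops assume both lists
   are sorted by DECL_UID (see operand_build_sort_virtual above).  */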
459 static inline void
460 finalize_ssa_def_ops (tree stmt)
462 unsigned new_i;
463 struct def_optype_d new_list;
464 def_optype_p old_ops, last;
465 tree *old_base;
467 new_list.next = NULL;
468 last = &new_list;
470 old_ops = DEF_OPS (stmt);
472 new_i = 0;
473 while (old_ops && new_i < VEC_length (tree, build_defs))
475 tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
476 old_base = DEF_OP_PTR (old_ops);
478 if (old_base == new_base)
480 /* if variables are the same, reuse this node. */
481 MOVE_HEAD_AFTER (old_ops, last);
482 new_i++;
484 else if (old_base < new_base)
486 /* if old is less than new, old goes to the free list. */
487 MOVE_HEAD_TO_FREELIST (old_ops, def);
489 else
491 /* This is a new operand. */
492 add_def_op (new_base, &last);
493 new_i++;
497 /* If there is anything remaining in the build_defs list, simply emit it. */
498 for ( ; new_i < VEC_length (tree, build_defs); new_i++)
499 add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);
501 last->next = NULL;
503 /* If there is anything in the old list, free it. */
504 if (old_ops)
506 old_ops->next = free_defs;
507 free_defs = old_ops;
510 /* Now set the stmt's operands. */
511 DEF_OPS (stmt) = new_list.next;
513 #ifdef ENABLE_CHECKING
515 def_optype_p ptr;
516 unsigned x = 0;
517 for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
518 x++;
520 gcc_assert (x == VEC_length (tree, build_defs));
522 #endif
525 /* This routine will create stmt operands for STMT from the def build list. */
527 static void
528 finalize_ssa_defs (tree stmt)
530 unsigned int num = VEC_length (tree, build_defs);
532 /* There should only be a single real definition per assignment. */
533 gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
535 /* If there is an old list, often the new list is identical, or close, so
536 find the elements at the beginning that are the same as the vector. */
537 finalize_ssa_def_ops (stmt);
538 VEC_truncate (tree, build_defs, 0);
541 /* Takes elements from build_uses and turns them into use operands of STMT.
542 TODO -- Make build_uses VEC of tree *. */
544 static inline void
545 finalize_ssa_use_ops (tree stmt)
547 unsigned new_i;
548 struct use_optype_d new_list;
549 use_optype_p old_ops, ptr, last;
551 new_list.next = NULL;
552 last = &new_list;
554 old_ops = USE_OPS (stmt);
556 /* If there is anything in the old list, free it. */
557 if (old_ops)
559 for (ptr = old_ops; ptr; ptr = ptr->next)
560 delink_imm_use (USE_OP_PTR (ptr));
561 old_ops->next = free_uses;
562 free_uses = old_ops;
565 /* Now create nodes for all the new nodes. */
566 for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
567 add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);
569 last->next = NULL;
571 /* Now set the stmt's operands. */
572 USE_OPS (stmt) = new_list.next;
574 #ifdef ENABLE_CHECKING
576 unsigned x = 0;
577 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
578 x++;
580 gcc_assert (x == VEC_length (tree, build_uses));
582 #endif
585 /* Finalize the use operands for STMT from the build_uses vector. */
587 static void
588 finalize_ssa_uses (tree stmt)
590 #ifdef ENABLE_CHECKING
592 unsigned x;
593 unsigned num = VEC_length (tree, build_uses);
595 /* If the pointer to the operand is the statement itself, something is
596 wrong. It means that we are pointing to a local variable (the
597 initial call to update_stmt_operands does not pass a pointer to a
598 statement). */
599 for (x = 0; x < num; x++)
600 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
602 #endif
603 finalize_ssa_use_ops (stmt);
604 VEC_truncate (tree, build_uses, 0);
608 /* Takes elements from build_v_may_defs and turns them into maydef operands of
609 STMT. */
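/* Editorial illustration of the merge below: if the statement's old
   V_MAY_DEF list covers base variables with DECL_UIDs {3, 7} and
   build_v_may_defs now holds {3, 5, 7}, the node for 3 is reused, a
   node for 5 is taken from free_maydefs (or freshly allocated), the
   node for 7 is reused, and any old node with no counterpart is
   delinked and returned to free_maydefs.  */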
611 static inline void
612 finalize_ssa_v_may_def_ops (tree stmt)
614 unsigned new_i;
615 struct maydef_optype_d new_list;
616 maydef_optype_p old_ops, ptr, last;
617 tree act;
618 unsigned old_base, new_base;
620 new_list.next = NULL;
621 last = &new_list;
623 old_ops = MAYDEF_OPS (stmt);
625 new_i = 0;
626 while (old_ops && new_i < VEC_length (tree, build_v_may_defs))
628 act = VEC_index (tree, build_v_may_defs, new_i);
629 new_base = get_name_decl (act);
630 old_base = get_name_decl (MAYDEF_OP (old_ops));
632 if (old_base == new_base)
634 /* if variables are the same, reuse this node. */
635 MOVE_HEAD_AFTER (old_ops, last);
636 set_virtual_use_link (MAYDEF_OP_PTR (last), stmt);
637 new_i++;
639 else if (old_base < new_base)
641 /* if old is less than new, old goes to the free list. */
642 delink_imm_use (MAYDEF_OP_PTR (old_ops));
643 MOVE_HEAD_TO_FREELIST (old_ops, maydef);
645 else
647 /* This is a new operand. */
648 add_maydef_op (stmt, act, &last);
649 new_i++;
653 /* If there is anything remaining in the build_v_may_defs list, simply emit it. */
654 for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++)
655 add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last);
657 last->next = NULL;
659 /* If there is anything in the old list, free it. */
660 if (old_ops)
662 for (ptr = old_ops; ptr; ptr = ptr->next)
663 delink_imm_use (MAYDEF_OP_PTR (ptr));
664 old_ops->next = free_maydefs;
665 free_maydefs = old_ops;
668 /* Now set the stmt's operands. */
669 MAYDEF_OPS (stmt) = new_list.next;
671 #ifdef ENABLE_CHECKING
673 unsigned x = 0;
674 for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next)
675 x++;
677 gcc_assert (x == VEC_length (tree, build_v_may_defs));
679 #endif
682 static void
683 finalize_ssa_v_may_defs (tree stmt)
685 finalize_ssa_v_may_def_ops (stmt);
689 /* Clear the in_list bits and empty the build array for V_MAY_DEFs. */
691 static inline void
692 cleanup_v_may_defs (void)
694 unsigned x, num;
695 num = VEC_length (tree, build_v_may_defs);
697 for (x = 0; x < num; x++)
699 tree t = VEC_index (tree, build_v_may_defs, x);
700 if (TREE_CODE (t) != SSA_NAME)
702 var_ann_t ann = var_ann (t);
703 ann->in_v_may_def_list = 0;
706 VEC_truncate (tree, build_v_may_defs, 0);
710 /* Takes elements from build_vuses and turns them into vuse operands of
711 STMT. */
713 static inline void
714 finalize_ssa_vuse_ops (tree stmt)
716 unsigned new_i;
717 struct vuse_optype_d new_list;
718 vuse_optype_p old_ops, ptr, last;
719 tree act;
720 unsigned old_base, new_base;
722 new_list.next = NULL;
723 last = &new_list;
725 old_ops = VUSE_OPS (stmt);
727 new_i = 0;
728 while (old_ops && new_i < VEC_length (tree, build_vuses))
730 act = VEC_index (tree, build_vuses, new_i);
731 new_base = get_name_decl (act);
732 old_base = get_name_decl (VUSE_OP (old_ops));
734 if (old_base == new_base)
736 /* if variables are the same, reuse this node. */
737 MOVE_HEAD_AFTER (old_ops, last);
738 set_virtual_use_link (VUSE_OP_PTR (last), stmt);
739 new_i++;
741 else if (old_base < new_base)
743 /* if old is less than new, old goes to the free list. */
744 delink_imm_use (USE_OP_PTR (old_ops));
745 MOVE_HEAD_TO_FREELIST (old_ops, vuse);
747 else
749 /* This is a new operand. */
750 add_vuse_op (stmt, act, &last);
751 new_i++;
755 /* If there is anything remaining in the build_vuses list, simply emit it. */
756 for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
757 add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last);
759 last->next = NULL;
761 /* If there is anything in the old list, free it. */
762 if (old_ops)
764 for (ptr = old_ops; ptr; ptr = ptr->next)
765 delink_imm_use (VUSE_OP_PTR (ptr));
766 old_ops->next = free_vuses;
767 free_vuses = old_ops;
770 /* Now set the stmt's operands. */
771 VUSE_OPS (stmt) = new_list.next;
773 #ifdef ENABLE_CHECKING
775 unsigned x = 0;
776 for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
777 x++;
779 gcc_assert (x == VEC_length (tree, build_vuses));
781 #endif
784 /* Finalize the VUSE operands for STMT from the build_vuses vector. */
786 static void
787 finalize_ssa_vuses (tree stmt)
789 unsigned num, num_v_may_defs;
790 unsigned vuse_index;
792 /* Remove superfluous VUSE operands. If the statement already has a
793 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is
794 not needed because V_MAY_DEFs imply a VUSE of the variable. For
795 instance, suppose that variable 'a' is aliased:
797 # VUSE <a_2>
798 # a_3 = V_MAY_DEF <a_2>
799 a = a + 1;
801 The VUSE <a_2> is superfluous because it is implied by the
802 V_MAY_DEF operation. */
803 num = VEC_length (tree, build_vuses);
804 num_v_may_defs = VEC_length (tree, build_v_may_defs);
806 if (num > 0 && num_v_may_defs > 0)
808 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
810 tree vuse;
811 vuse = VEC_index (tree, build_vuses, vuse_index);
812 if (TREE_CODE (vuse) != SSA_NAME)
814 var_ann_t ann = var_ann (vuse);
815 ann->in_vuse_list = 0;
816 if (ann->in_v_may_def_list)
818 VEC_ordered_remove (tree, build_vuses, vuse_index);
819 continue;
822 vuse_index++;
825 else
827 /* Clear out the in_list bits. */
828 for (vuse_index = 0;
829 vuse_index < VEC_length (tree, build_vuses);
830 vuse_index++)
832 tree t = VEC_index (tree, build_vuses, vuse_index);
833 if (TREE_CODE (t) != SSA_NAME)
835 var_ann_t ann = var_ann (t);
836 ann->in_vuse_list = 0;
841 finalize_ssa_vuse_ops (stmt);
843 /* The V_MAY_DEF build vector wasn't cleaned up because we needed it. */
844 cleanup_v_may_defs ();
846 /* Free the VUSEs build vector. */
847 VEC_truncate (tree, build_vuses, 0);
851 /* Takes elements from build_v_must_defs and turns them into mustdef operands of
852 STMT. */
854 static inline void
855 finalize_ssa_v_must_def_ops (tree stmt)
857 unsigned new_i;
858 struct mustdef_optype_d new_list;
859 mustdef_optype_p old_ops, ptr, last;
860 tree act;
861 unsigned old_base, new_base;
863 new_list.next = NULL;
864 last = &new_list;
866 old_ops = MUSTDEF_OPS (stmt);
868 new_i = 0;
869 while (old_ops && new_i < VEC_length (tree, build_v_must_defs))
871 act = VEC_index (tree, build_v_must_defs, new_i);
872 new_base = get_name_decl (act);
873 old_base = get_name_decl (MUSTDEF_KILL (old_ops));
875 if (old_base == new_base)
877 /* If variables are the same, reuse this node. */
878 MOVE_HEAD_AFTER (old_ops, last);
879 set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt);
880 new_i++;
882 else if (old_base < new_base)
884 /* If old is less than new, old goes to the free list. */
885 delink_imm_use (MUSTDEF_KILL_PTR (old_ops));
886 MOVE_HEAD_TO_FREELIST (old_ops, mustdef);
888 else
890 /* This is a new operand. */
891 add_mustdef_op (stmt, act, &last);
892 new_i++;
896 /* If there is anything remaining in the build_v_must_defs list, simply emit it. */
897 for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++)
898 add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last);
900 last->next = NULL;
902 /* If there is anything in the old list, free it. */
903 if (old_ops)
905 for (ptr = old_ops; ptr; ptr = ptr->next)
906 delink_imm_use (MUSTDEF_KILL_PTR (ptr));
907 old_ops->next = free_mustdefs;
908 free_mustdefs = old_ops;
911 /* Now set the stmt's operands. */
912 MUSTDEF_OPS (stmt) = new_list.next;
914 #ifdef ENABLE_CHECKING
916 unsigned x = 0;
917 for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next)
918 x++;
920 gcc_assert (x == VEC_length (tree, build_v_must_defs));
922 #endif
925 static void
926 finalize_ssa_v_must_defs (tree stmt)
928 /* In the presence of subvars, there may be more than one V_MUST_DEF
929 per statement (one for each subvar). It is a bit expensive to
930 verify that all must-defs in a statement belong to subvars if
931 there is more than one must-def, so we don't do it. Suffice to
932 say, if you reach here without having subvars and with more than
933 one must-def, you have hit a bug.
934 finalize_ssa_v_must_def_ops (stmt);
935 VEC_truncate (tree, build_v_must_defs, 0);
939 /* Finalize all the build vectors and attach the new operand lists to STMT. */
941 static inline void
942 finalize_ssa_stmt_operands (tree stmt)
944 finalize_ssa_defs (stmt);
945 finalize_ssa_uses (stmt);
946 finalize_ssa_v_must_defs (stmt);
947 finalize_ssa_v_may_defs (stmt);
948 finalize_ssa_vuses (stmt);
952 /* Start the process of building up a new set of operand vectors. */
954 static inline void
955 start_ssa_stmt_operands (void)
957 gcc_assert (VEC_length (tree, build_defs) == 0);
958 gcc_assert (VEC_length (tree, build_uses) == 0);
959 gcc_assert (VEC_length (tree, build_vuses) == 0);
960 gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
961 gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
965 /* Add DEF_P to the list of pointers to operands. */
967 static inline void
968 append_def (tree *def_p)
970 VEC_safe_push (tree, heap, build_defs, (tree)def_p);
974 /* Add USE_P to the list of pointers to operands. */
976 static inline void
977 append_use (tree *use_p)
979 VEC_safe_push (tree, heap, build_uses, (tree)use_p);
983 /* Add a new virtual may def for variable VAR to the build array. */
985 static inline void
986 append_v_may_def (tree var)
988 if (TREE_CODE (var) != SSA_NAME)
990 var_ann_t ann = get_var_ann (var);
992 /* Don't allow duplicate entries. */
993 if (ann->in_v_may_def_list)
994 return;
995 ann->in_v_may_def_list = 1;
998 VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
1002 /* Add VAR to the list of virtual uses. */
1004 static inline void
1005 append_vuse (tree var)
1007 /* Don't allow duplicate entries. */
1008 if (TREE_CODE (var) != SSA_NAME)
1010 var_ann_t ann = get_var_ann (var);
1012 if (ann->in_vuse_list || ann->in_v_may_def_list)
1013 return;
1014 ann->in_vuse_list = 1;
1017 VEC_safe_push (tree, heap, build_vuses, (tree)var);
1021 /* Add VAR to the list of virtual must definitions in the build array. */
1023 static inline void
1024 append_v_must_def (tree var)
1026 unsigned i;
1028 /* Don't allow duplicate entries. */
1029 for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
1030 if (var == VEC_index (tree, build_v_must_defs, i))
1031 return;
1033 VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
1037 /* REF is a tree that contains the entire pointer dereference
1038 expression, if available, or NULL otherwise. ALIAS is the variable
1039 we are asking if REF can access. OFFSET and SIZE come from the
1040 memory access expression that generated this virtual operand. */
1042 static bool
1043 access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
1044 HOST_WIDE_INT size)
1046 bool offsetgtz = offset > 0;
1047 unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
1048 tree base = ref ? get_base_address (ref) : NULL;
1050 /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
1051 using a call-clobbered memory tag. By definition, call-clobbered
1052 memory tags can always touch .GLOBAL_VAR. */
1053 if (alias == gimple_global_var (cfun))
1054 return true;
1056 /* We cannot prune nonlocal aliases because they are not type
1057 specific. */
1058 if (alias == gimple_nonlocal_all (cfun))
1059 return true;
1061 /* If ALIAS is an SFT, it can't be touched if the offset
1062 and size of the access is not overlapping with the SFT offset and
1063 size. This is only true if we are accessing through a pointer
1064 to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
1065 be accessing through a pointer to some substruct of the
1066 structure, and if we try to prune there, we will have the wrong
1067 offset, and get the wrong answer.
1068 i.e., we can't prune without more work if we have something like
1070 struct gcc_target
1072 struct asm_out
1074 const char *byte_op;
1075 struct asm_int_op
1077 const char *hi;
1078 } aligned_op;
1079 } asm_out;
1080 } targetm;
1082 foo = &targetm.asm_out.aligned_op;
1083 return foo->hi;
1085 SFT.1, which represents hi, will have SFT_OFFSET=32 because in
1086 terms of SFT_PARENT_VAR, that is where it is.
1087 However, the access through the foo pointer will be at offset 0. */
1088 if (size != -1
1089 && TREE_CODE (alias) == STRUCT_FIELD_TAG
1090 && base
1091 && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
1092 && !overlap_subvar (offset, size, alias, NULL))
1094 #ifdef ACCESS_DEBUGGING
1095 fprintf (stderr, "Access to ");
1096 print_generic_expr (stderr, ref, 0);
1097 fprintf (stderr, " may not touch ");
1098 print_generic_expr (stderr, alias, 0);
1099 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1100 #endif
1101 return false;
1104 /* Without strict aliasing, it is impossible for a component access
1105 through a pointer to touch a random variable, unless that
1106 variable *is* a structure or a pointer.
1108 That is, given p->c, and some random global variable b,
1109 there is no legal way that p->c could be an access to b.
1111 Without strict aliasing on, we consider it legal to do something
1112 like:
1114 struct foos { int l; };
1115 int foo;
1116 static struct foos *getfoo(void);
1117 int main (void)
1119 struct foos *f = getfoo();
1120 f->l = 1;
1121 foo = 2;
1122 if (f->l == 1)
1123 abort();
1124 exit(0);
1126 static struct foos *getfoo(void)
1127 { return (struct foos *)&foo; }
1129 (taken from 20000623-1.c)
1131 The docs also say/imply that access through union pointers
1132 is legal (but *not* if you take the address of the union member,
1133 i.e. the inverse), such that you can do
1135 typedef union {
1136 int d;
1137 } U;
1139 int rv;
1140 void breakme()
1142 U *rv0;
1143 U *pretmp = (U*)&rv;
1144 rv0 = pretmp;
1145 rv0->d = 42;
1147 To implement this, we just punt on accesses through union
1148 pointers entirely.
1150 else if (ref
1151 && flag_strict_aliasing
1152 && TREE_CODE (ref) != INDIRECT_REF
1153 && !MTAG_P (alias)
1154 && (TREE_CODE (base) != INDIRECT_REF
1155 || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
1156 && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
1157 && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
1158 #if 0
1159 /* FIXME: PR tree-optimization/29680. */
1160 && !var_ann (alias)->is_heapvar
1161 #else
1162 && !POINTER_TYPE_P (TREE_TYPE (alias))
1163 #endif
1164 /* When the struct has the may_alias attribute, its alias set is
1165 zero; do not prune the access in that case. */
1166 && get_alias_set (base))
1168 #ifdef ACCESS_DEBUGGING
1169 fprintf (stderr, "Access to ");
1170 print_generic_expr (stderr, ref, 0);
1171 fprintf (stderr, " may not touch ");
1172 print_generic_expr (stderr, alias, 0);
1173 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1174 #endif
1175 return false;
1178 /* If the offset of the access is greater than the size of one of
1179 the possible aliases, it can't be touching that alias, because it
1180 would be past the end of the structure. */
1181 else if (ref
1182 && flag_strict_aliasing
1183 && TREE_CODE (ref) != INDIRECT_REF
1184 && !MTAG_P (alias)
1185 && !POINTER_TYPE_P (TREE_TYPE (alias))
1186 && offsetgtz
1187 && DECL_SIZE (alias)
1188 && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
1189 && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
1191 #ifdef ACCESS_DEBUGGING
1192 fprintf (stderr, "Access to ");
1193 print_generic_expr (stderr, ref, 0);
1194 fprintf (stderr, " may not touch ");
1195 print_generic_expr (stderr, alias, 0);
1196 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1197 #endif
1198 return false;
1201 return true;
1205 /* Add VAR to the virtual operands array. FLAGS is as in
1206 get_expr_operands. FULL_REF is a tree that contains the entire
1207 pointer dereference expression, if available, or NULL otherwise.
1208 OFFSET and SIZE come from the memory access expression that
1209 generated this virtual operand. FOR_CLOBBER is true if this is
1210 adding a virtual operand for a call clobber. */
1212 static void
1213 add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
1214 tree full_ref, HOST_WIDE_INT offset,
1215 HOST_WIDE_INT size, bool for_clobber)
1217 VEC(tree,gc) *aliases;
1218 tree sym;
1219 var_ann_t v_ann;
1221 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1222 v_ann = var_ann (sym);
1224 /* Mark statements with volatile operands. Optimizers should back
1225 off from statements having volatile operands. */
1226 if (TREE_THIS_VOLATILE (sym) && s_ann)
1227 s_ann->has_volatile_ops = true;
1229 /* If the variable cannot be modified and this is a V_MAY_DEF change
1230 it into a VUSE. This happens when read-only variables are marked
1231 call-clobbered and/or aliased to writable variables. So we only
1232 do this for non-specific stores.
1234 Note that if this is a specific store, i.e. associated with a
1235 modify_expr, then we can't suppress the V_MAY_DEF, lest we run
1236 into validation problems.
1238 This can happen when programs cast away const, leaving us with a
1239 store to read-only memory. If the statement is actually executed
1240 at runtime, then the program is ill formed. If the statement is
1241 not executed then all is well. At the very least, we cannot ICE. */
1242 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1243 flags &= ~(opf_is_def | opf_kill_def);
1245 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1246 virtual operands, unless the caller has specifically requested
1247 not to add virtual operands (used when adding operands inside an
1248 ADDR_EXPR expression). */
1249 if (flags & opf_no_vops)
1250 return;
1252 aliases = v_ann->may_aliases;
1253 if (aliases == NULL)
1255 /* The variable is not aliased or it is an alias tag. */
1256 if (flags & opf_is_def)
1258 if (flags & opf_kill_def)
1260 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1261 variable definitions. */
1262 gcc_assert (!MTAG_P (var)
1263 || TREE_CODE (var) == STRUCT_FIELD_TAG);
1264 append_v_must_def (var);
1266 else
1268 /* Add a V_MAY_DEF for call-clobbered variables and
1269 memory tags. */
1270 append_v_may_def (var);
1273 else
1274 append_vuse (var);
1276 else
1278 unsigned i;
1279 tree al;
1281 /* The variable is aliased. Add its aliases to the virtual
1282 operands. */
1283 gcc_assert (VEC_length (tree, aliases) != 0);
1285 if (flags & opf_is_def)
1288 bool none_added = true;
1290 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1292 if (!access_can_touch_variable (full_ref, al, offset, size))
1293 continue;
1295 none_added = false;
1296 append_v_may_def (al);
1299 /* If the variable is also an alias tag, add a virtual
1300 operand for it, otherwise we will miss representing
1301 references to the members of the variable's alias set.
1302 This fixes the bug in gcc.c-torture/execute/20020503-1.c.
1304 It is also necessary to add bare defs on clobbers for
1305 SMT's, so that bare SMT uses caused by pruning all the
1306 aliases will link up properly with calls. In order to
1307 keep the number of these bare defs we add down to the
1308 minimum necessary, we keep track of which SMT's were used
1309 alone in statement vdefs or VUSEs. */
1310 if (v_ann->is_aliased
1311 || none_added
1312 || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
1313 && for_clobber
1314 && SMT_USED_ALONE (var)))
1316 /* Every bare SMT def we add should have SMT_USED_ALONE
1317 set on it, or else we will get the wrong answer on
1318 clobbers. */
1319 if (none_added
1320 && !updating_used_alone && gimple_aliases_computed_p (cfun)
1321 && TREE_CODE (var) == SYMBOL_MEMORY_TAG)
1322 gcc_assert (SMT_USED_ALONE (var));
1324 append_v_may_def (var);
1327 else
1329 bool none_added = true;
1330 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1332 if (!access_can_touch_variable (full_ref, al, offset, size))
1333 continue;
1334 none_added = false;
1335 append_vuse (al);
1338 /* Similarly, append a virtual use for VAR itself, when
1339 it is an alias tag. */
1340 if (v_ann->is_aliased || none_added)
1341 append_vuse (var);
1347 /* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
1348 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1349 the statement's real operands, otherwise it is added to virtual
1350 operands. */
1352 static void
1353 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1355 bool is_real_op;
1356 tree var, sym;
1357 var_ann_t v_ann;
1359 var = *var_p;
1360 gcc_assert (SSA_VAR_P (var));
1362 is_real_op = is_gimple_reg (var);
1364 /* If this is a real operand, the operand is either an SSA name or a
1365 decl. Virtual operands may only be decls. */
1366 gcc_assert (is_real_op || DECL_P (var));
1368 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1369 v_ann = var_ann (sym);
1371 /* Mark statements with volatile operands. Optimizers should back
1372 off from statements having volatile operands. */
1373 if (TREE_THIS_VOLATILE (sym) && s_ann)
1374 s_ann->has_volatile_ops = true;
1376 if (is_real_op)
1378 /* The variable is a GIMPLE register. Add it to real operands. */
1379 if (flags & opf_is_def)
1380 append_def (var_p);
1381 else
1382 append_use (var_p);
1384 else
1385 add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
1389 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1390 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
1392 STMT is the statement being processed, EXPR is the INDIRECT_REF
1393 that got us here.
1395 FLAGS is as in get_expr_operands.
1397 FULL_REF contains the full pointer dereference expression, if we
1398 have it, or NULL otherwise.
1400 OFFSET and SIZE are the location of the access inside the
1401 dereferenced pointer, if known.
1403 RECURSE_ON_BASE should be set to true if we want to continue
1404 calling get_expr_operands on the base pointer, and false if
1405 something else will do it for us. */
1407 static void
1408 get_indirect_ref_operands (tree stmt, tree expr, int flags,
1409 tree full_ref,
1410 HOST_WIDE_INT offset, HOST_WIDE_INT size,
1411 bool recurse_on_base)
1413 tree *pptr = &TREE_OPERAND (expr, 0);
1414 tree ptr = *pptr;
1415 stmt_ann_t s_ann = stmt_ann (stmt);
1417 /* Stores into INDIRECT_REF operands are never killing definitions. */
1418 flags &= ~opf_kill_def;
1420 if (SSA_VAR_P (ptr))
1422 struct ptr_info_def *pi = NULL;
1424 /* If PTR has flow-sensitive points-to information, use it. */
1425 if (TREE_CODE (ptr) == SSA_NAME
1426 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1427 && pi->name_mem_tag)
1429 /* PTR has its own memory tag. Use it. */
1430 add_virtual_operand (pi->name_mem_tag, s_ann, flags,
1431 full_ref, offset, size, false);
1433 else
1435 /* If PTR is not an SSA_NAME or it doesn't have a name
1436 tag, use its symbol memory tag. */
1437 var_ann_t v_ann;
1439 /* If we are emitting debugging dumps, display a warning if
1440 PTR is an SSA_NAME with no flow-sensitive alias
1441 information. That means that we may need to compute
1442 aliasing again. */
1443 if (dump_file
1444 && TREE_CODE (ptr) == SSA_NAME
1445 && pi == NULL)
1447 fprintf (dump_file,
1448 "NOTE: no flow-sensitive alias info for ");
1449 print_generic_expr (dump_file, ptr, dump_flags);
1450 fprintf (dump_file, " in ");
1451 print_generic_stmt (dump_file, stmt, dump_flags);
1454 if (TREE_CODE (ptr) == SSA_NAME)
1455 ptr = SSA_NAME_VAR (ptr);
1456 v_ann = var_ann (ptr);
1458 if (v_ann->symbol_mem_tag)
1459 add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
1460 full_ref, offset, size, false);
1463 else if (TREE_CODE (ptr) == INTEGER_CST)
1465 /* If a constant is used as a pointer, we can't generate a real
1466 operand for it but we mark the statement volatile to prevent
1467 optimizations from messing things up. */
1468 if (s_ann)
1469 s_ann->has_volatile_ops = true;
1470 return;
1472 else
1474 /* OK, this isn't even is_gimple_min_invariant. Something's broken. */
1475 gcc_unreachable ();
1478 /* If requested, add a USE operand for the base pointer. */
1479 if (recurse_on_base)
1480 get_expr_operands (stmt, pptr, opf_none);
1484 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1486 static void
1487 get_tmr_operands (tree stmt, tree expr, int flags)
1489 tree tag = TMR_TAG (expr), ref;
1490 HOST_WIDE_INT offset, size, maxsize;
1491 subvar_t svars, sv;
1492 stmt_ann_t s_ann = stmt_ann (stmt);
1494 /* First record the real operands. */
1495 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1496 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1498 /* MEM_REFs should never be killing. */
1499 flags &= ~opf_kill_def;
1501 if (TMR_SYMBOL (expr))
1503 stmt_ann_t ann = stmt_ann (stmt);
1504 add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1507 if (!tag)
1509 /* Something weird, so ensure that we will be careful. */
1510 stmt_ann (stmt)->has_volatile_ops = true;
1511 return;
1514 if (DECL_P (tag))
1516 get_expr_operands (stmt, &tag, flags);
1517 return;
1520 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1521 gcc_assert (ref != NULL_TREE);
1522 svars = get_subvars_for_var (ref);
1523 for (sv = svars; sv; sv = sv->next)
1525 bool exact;
1526 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1528 int subvar_flags = flags;
1529 if (!exact || size != maxsize)
1530 subvar_flags &= ~opf_kill_def;
1531 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1537 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1538 clobbered variables in the function. */
1540 static void
1541 add_call_clobber_ops (tree stmt, tree callee)
1543 unsigned u;
1544 bitmap_iterator bi;
1545 stmt_ann_t s_ann = stmt_ann (stmt);
1546 bitmap not_read_b, not_written_b;
1548 /* Functions that are not const, pure or never return may clobber
1549 call-clobbered variables. */
1550 if (s_ann)
1551 s_ann->makes_clobbering_call = true;
1553 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1554 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1555 if (gimple_global_var (cfun))
1557 tree var = gimple_global_var (cfun);
1558 add_stmt_operand (&var, s_ann, opf_is_def);
1559 return;
1562 /* Get info for local and module level statics. There is a bit
1563 set for each static if the call being processed does not read
1564 or write that variable. */
1565 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1566 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1567 /* Add a V_MAY_DEF operand for every call clobbered variable. */
1568 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
1570 tree var = referenced_var_lookup (u);
1571 unsigned int escape_mask = var_ann (var)->escape_mask;
1572 tree real_var = var;
1573 bool not_read;
1574 bool not_written;
1576 /* Not read and not written are computed on regular vars, not
1577 subvars, so look at the parent var if this is an SFT. */
1578 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1579 real_var = SFT_PARENT_VAR (var);
1581 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1582 DECL_UID (real_var)) : false;
1583 not_written = not_written_b ? bitmap_bit_p (not_written_b,
1584 DECL_UID (real_var)) : false;
1585 gcc_assert (!unmodifiable_var_p (var));
1587 clobber_stats.clobbered_vars++;
1589 /* See if this variable is really clobbered by this function. */
1591 /* Trivial case: Things escaping only to pure/const are not
1592 clobbered by non-pure-const, and only read by pure/const. */
1593 if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
1595 tree call = get_call_expr_in (stmt);
1596 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1598 add_stmt_operand (&var, s_ann, opf_none);
1599 clobber_stats.unescapable_clobbers_avoided++;
1600 continue;
1602 else
1604 clobber_stats.unescapable_clobbers_avoided++;
1605 continue;
1609 if (not_written)
1611 clobber_stats.static_write_clobbers_avoided++;
1612 if (!not_read)
1613 add_stmt_operand (&var, s_ann, opf_none);
1614 else
1615 clobber_stats.static_read_clobbers_avoided++;
1617 else
1618 add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
1623 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1624 function. */
1626 static void
1627 add_call_read_ops (tree stmt, tree callee)
1629 unsigned u;
1630 bitmap_iterator bi;
1631 stmt_ann_t s_ann = stmt_ann (stmt);
1632 bitmap not_read_b;
1634 /* If the function is not pure, it may reference memory. Add
1635 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1636 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1637 if (gimple_global_var (cfun))
1639 tree var = gimple_global_var (cfun);
1640 add_stmt_operand (&var, s_ann, opf_none);
1641 return;
1644 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1646 /* Add a VUSE for each call-clobbered variable. */
1647 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
1649 tree var = referenced_var (u);
1650 tree real_var = var;
1651 bool not_read;
1653 clobber_stats.readonly_clobbers++;
1655 /* Not read and not written are computed on regular vars, not
1656 subvars, so look at the parent var if this is an SFT. */
1658 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1659 real_var = SFT_PARENT_VAR (var);
1661 not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
1662 : false;
1664 if (not_read)
1666 clobber_stats.static_readonly_clobbers_avoided++;
1667 continue;
1670 add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
1675 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1677 static void
1678 get_call_expr_operands (tree stmt, tree expr)
1680 tree op;
1681 int call_flags = call_expr_flags (expr);
1683 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1684 operands for all the symbols that have been found to be
1685 call-clobbered.
1687 Note that if aliases have not been computed, the global effects
1688 of calls will not be included in the SSA web. This is fine
1689 because no optimizer should run before aliases have been
1690 computed. By not bothering with virtual operands for CALL_EXPRs
1691 we avoid adding superfluous virtual operands, which can be a
1692 significant compile time sink (See PR 15855). */
1693 if (gimple_aliases_computed_p (cfun)
1694 && !bitmap_empty_p (gimple_call_clobbered_vars (cfun))
1695 && !(call_flags & ECF_NOVOPS))
1697 /* A 'pure' or a 'const' function never call-clobbers anything.
1698 A 'noreturn' function might, but since we don't return anyway
1699 there is no point in recording that. */
1700 if (TREE_SIDE_EFFECTS (expr)
1701 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1702 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1703 else if (!(call_flags & ECF_CONST))
1704 add_call_read_ops (stmt, get_callee_fndecl (expr));
1707 /* Find uses in the called function. */
1708 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1710 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1711 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1713 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1717 /* Scan operands for the ASM_EXPR statement STMT. */
1719 static void
1720 get_asm_expr_operands (tree stmt)
1722 stmt_ann_t s_ann = stmt_ann (stmt);
1723 int noutputs = list_length (ASM_OUTPUTS (stmt));
1724 const char **oconstraints
1725 = (const char **) alloca ((noutputs) * sizeof (const char *));
1726 int i;
1727 tree link;
1728 const char *constraint;
1729 bool allows_mem, allows_reg, is_inout;
1731 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1733 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1734 oconstraints[i] = constraint;
1735 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
1736 &allows_reg, &is_inout);
1738 /* This should have been split in gimplify_asm_expr. */
1739 gcc_assert (!allows_reg || !is_inout);
1741 /* Memory operands are addressable. Note that STMT needs the
1742 address of this operand. */
1743 if (!allows_reg && allows_mem)
1745 tree t = get_base_address (TREE_VALUE (link));
1746 if (t && DECL_P (t) && s_ann)
1747 add_to_addressable_set (t, &s_ann->addresses_taken);
1750 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1753 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1755 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1756 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1757 oconstraints, &allows_mem, &allows_reg);
1759 /* Memory operands are addressable. Note that STMT needs the
1760 address of this operand. */
1761 if (!allows_reg && allows_mem)
1763 tree t = get_base_address (TREE_VALUE (link));
1764 if (t && DECL_P (t) && s_ann)
1765 add_to_addressable_set (t, &s_ann->addresses_taken);
1768 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1772 /* Clobber memory for asm ("" : : : "memory"); */
1773 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1774 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1776 unsigned i;
1777 bitmap_iterator bi;
1779 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1780 decided to group them). */
1781 if (gimple_global_var (cfun))
1783 tree var = gimple_global_var (cfun);
1784 add_stmt_operand (&var, s_ann, opf_is_def);
1786 else
1787 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
1789 tree var = referenced_var (i);
1790 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1793 /* Now clobber all addressables. */
1794 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
1796 tree var = referenced_var (i);
1798 /* Subvars are explicitly represented in this list, so
1799 we don't need the original to be added to the clobber
1800 ops, but the original *will* be in this list because
1801 we keep the addressability of the original
1802 variable up-to-date so we don't screw up the rest of
1803 the backend. */
1804 if (var_can_have_subvars (var)
1805 && get_subvars_for_var (var) != NULL)
1806 continue;
1808 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1811 break;
1816 /* Scan operands for the assignment expression EXPR in statement STMT. */
1818 static void
1819 get_modify_expr_operands (tree stmt, tree expr)
1821 /* First get operands from the RHS. */
1822 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1824 /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
1825 registers. If the LHS is a store to memory, we will either need
1826 a preserving definition (V_MAY_DEF) or a killing definition
1827 (V_MUST_DEF).
1829 Preserving definitions are those that modify a part of an
1830 aggregate object for which no subvars have been computed (or the
1831 reference does not correspond exactly to one of them). Stores
1832 through a pointer are also represented with V_MAY_DEF operators.
1834 The determination of whether to use a preserving or a killing
1835 definition is done while scanning the LHS of the assignment. By
1836 default, assume that we will emit a V_MUST_DEF. */
1837 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def|opf_kill_def);
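/* Editorial illustration: for 'x = 1' where 'x' is a non-aliased local
   that is not a GIMPLE register, the LHS yields a killing V_MUST_DEF
   (a plain DEF if 'x' is a register); for '*p = 1' the store goes
   through a pointer, get_indirect_ref_operands strips opf_kill_def,
   and a preserving V_MAY_DEF of p's memory tag is produced instead.  */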
1841 /* Recursively scan the expression pointed to by EXPR_P in statement
1842 STMT. FLAGS is one of the OPF_* constants modifying how to
1843 interpret the operands found. */
1845 static void
1846 get_expr_operands (tree stmt, tree *expr_p, int flags)
1848 enum tree_code code;
1849 enum tree_code_class class;
1850 tree expr = *expr_p;
1851 stmt_ann_t s_ann = stmt_ann (stmt);
1853 if (expr == NULL)
1854 return;
1856 code = TREE_CODE (expr);
1857 class = TREE_CODE_CLASS (code);
1859 switch (code)
1861 case ADDR_EXPR:
1862 /* Taking the address of a variable does not represent a
1863 reference to it, but the fact that the statement takes its
1864 address will be of interest to some passes (e.g. alias
1865 resolution). */
1866 add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
1868 /* If the address is invariant, there may be no interesting
1869 variable references inside. */
1870 if (is_gimple_min_invariant (expr))
1871 return;
1873 /* Otherwise, there may be variables referenced inside but there
1874 should be no VUSEs created, since the referenced objects are
1875 not really accessed. The only operands that we should find
1876 here are ARRAY_REF indices which will always be real operands
1877 (GIMPLE does not allow non-registers as array indices). */
1878 flags |= opf_no_vops;
1879 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1880 return;
1882 case SSA_NAME:
1883 case STRUCT_FIELD_TAG:
1884 case SYMBOL_MEMORY_TAG:
1885 case NAME_MEMORY_TAG:
1886 add_stmt_operand (expr_p, s_ann, flags);
1887 return;
1889 case VAR_DECL:
1890 case PARM_DECL:
1891 case RESULT_DECL:
1893 subvar_t svars;
1895 /* Add the subvars for a variable, if it has subvars, to DEFS
1896 or USES. Otherwise, add the variable itself. Whether it
1897 goes to USES or DEFS depends on the operand flags. */
1898 if (var_can_have_subvars (expr)
1899 && (svars = get_subvars_for_var (expr)))
1901 subvar_t sv;
1902 for (sv = svars; sv; sv = sv->next)
1903 add_stmt_operand (&sv->var, s_ann, flags);
1905 else
1906 add_stmt_operand (expr_p, s_ann, flags);
1908 return;
1911 case MISALIGNED_INDIRECT_REF:
1912 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1913 /* fall through */
1915 case ALIGN_INDIRECT_REF:
1916 case INDIRECT_REF:
1917 get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
1918 return;
1920 case TARGET_MEM_REF:
1921 get_tmr_operands (stmt, expr, flags);
1922 return;
1924 case ARRAY_REF:
1925 case ARRAY_RANGE_REF:
1926 case COMPONENT_REF:
1927 case REALPART_EXPR:
1928 case IMAGPART_EXPR:
1930 tree ref;
1931 HOST_WIDE_INT offset, size, maxsize;
1932 bool none = true;
1934 /* This component reference becomes an access to all of the
1935 subvariables it can touch, if we can determine that, but
1936 *NOT* the real one. If we can't determine which fields we
1937 could touch, the recursion will eventually get to a
1938 variable and add *all* of its subvars, or whatever is the
1939 minimum correct subset. */
1940 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
1941 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
1943 subvar_t sv;
1944 subvar_t svars = get_subvars_for_var (ref);
1946 for (sv = svars; sv; sv = sv->next)
1948 bool exact;
1950 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1952 int subvar_flags = flags;
1953 none = false;
1954 if (!exact || size != maxsize)
1955 subvar_flags &= ~opf_kill_def;
1956 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1960 if (!none)
1961 flags |= opf_no_vops;
1963 else if (TREE_CODE (ref) == INDIRECT_REF)
1965 get_indirect_ref_operands (stmt, ref, flags, expr, offset,
1966 maxsize, false);
1967 flags |= opf_no_vops;
1970 /* Even if we found subvars above, we still need to scan the base
1971 expression so that we see immediate uses such as 'd' in s.a[d];
1972 if s.a has a subvar we would miss them otherwise. */
1973 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1974 flags & ~opf_kill_def);
1976 if (code == COMPONENT_REF)
1978 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1979 s_ann->has_volatile_ops = true;
1980 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1982 else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
1984 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1985 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1986 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1989 return;
1992 case WITH_SIZE_EXPR:
1993 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1994 and an rvalue reference to its second argument. */
1995 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1996 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1997 return;
1999 case CALL_EXPR:
2000 get_call_expr_operands (stmt, expr);
2001 return;
2003 case COND_EXPR:
2004 case VEC_COND_EXPR:
2005 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
2006 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
2007 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
2008 return;
2010 case MODIFY_EXPR:
2011 get_modify_expr_operands (stmt, expr);
2012 return;
2014 case CONSTRUCTOR:
2016 /* General aggregate CONSTRUCTORs have been decomposed, but they
2017 are still in use as the COMPLEX_EXPR equivalent for vectors. */
2018 constructor_elt *ce;
2019 unsigned HOST_WIDE_INT idx;
2021 for (idx = 0;
2022 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
2023 idx++)
2024 get_expr_operands (stmt, &ce->value, opf_none);
2026 return;
2029 case BIT_FIELD_REF:
2030 /* Stores using BIT_FIELD_REF are always preserving definitions. */
2031 flags &= ~opf_kill_def;
2033 /* Fallthru */
2035 case TRUTH_NOT_EXPR:
2036 case VIEW_CONVERT_EXPR:
2037 do_unary:
2038 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2039 return;
2041 case TRUTH_AND_EXPR:
2042 case TRUTH_OR_EXPR:
2043 case TRUTH_XOR_EXPR:
2044 case COMPOUND_EXPR:
2045 case OBJ_TYPE_REF:
2046 case ASSERT_EXPR:
2047 do_binary:
2049 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2050 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2051 return;
2054 case DOT_PROD_EXPR:
2055 case REALIGN_LOAD_EXPR:
2057 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2058 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2059 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
2060 return;
2063 case BLOCK:
2064 case FUNCTION_DECL:
2065 case EXC_PTR_EXPR:
2066 case FILTER_EXPR:
2067 case LABEL_DECL:
2068 case CONST_DECL:
2069 case OMP_PARALLEL:
2070 case OMP_SECTIONS:
2071 case OMP_FOR:
2072 case OMP_SINGLE:
2073 case OMP_MASTER:
2074 case OMP_ORDERED:
2075 case OMP_CRITICAL:
2076 case OMP_RETURN:
2077 case OMP_CONTINUE:
2078 /* Expressions that make no memory references. */
2079 return;
2081 default:
2082 if (class == tcc_unary)
2083 goto do_unary;
2084 if (class == tcc_binary || class == tcc_comparison)
2085 goto do_binary;
2086 if (class == tcc_constant || class == tcc_type)
2087 return;
2090 /* If we get here, something has gone wrong. */
2091 #ifdef ENABLE_CHECKING
2092 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
2093 debug_tree (expr);
2094 fputs ("\n", stderr);
2095 #endif
2096 gcc_unreachable ();
2100 /* Parse STMT looking for operands. When finished, the various
2101 build_* operand vectors will have potential operands in them. */
2103 static void
2104 parse_ssa_operands (tree stmt)
2106 enum tree_code code;
2108 code = TREE_CODE (stmt);
2109 switch (code)
2111 case MODIFY_EXPR:
2112 get_modify_expr_operands (stmt, stmt);
2113 break;
2115 case COND_EXPR:
2116 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
2117 break;
2119 case SWITCH_EXPR:
2120 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
2121 break;
2123 case ASM_EXPR:
2124 get_asm_expr_operands (stmt);
2125 break;
2127 case RETURN_EXPR:
2128 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
2129 break;
2131 case GOTO_EXPR:
2132 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
2133 break;
2135 case LABEL_EXPR:
2136 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
2137 break;
2139 case BIND_EXPR:
2140 case CASE_LABEL_EXPR:
2141 case TRY_CATCH_EXPR:
2142 case TRY_FINALLY_EXPR:
2143 case EH_FILTER_EXPR:
2144 case CATCH_EXPR:
2145 case RESX_EXPR:
2146 /* These nodes contain no variable references. */
2147 break;
2149 default:
2150 /* Notice that if get_expr_operands tries to use &STMT as the
2151 operand pointer (which may only happen for USE operands), we
2152 will fail in add_stmt_operand. This default will handle
2153 statements like empty statements, or CALL_EXPRs that may
2154 appear on the RHS of a statement or as statements themselves. */
2155 get_expr_operands (stmt, &stmt, opf_none);
2156 break;
2161 /* Create an operands cache for STMT. */
2163 static void
2164 build_ssa_operands (tree stmt)
2166 stmt_ann_t ann = get_stmt_ann (stmt);
2168 /* Initially assume that the statement has no volatile operands. */
2169 if (ann)
2170 ann->has_volatile_ops = false;
2172 start_ssa_stmt_operands ();
2174 parse_ssa_operands (stmt);
2175 operand_build_sort_virtual (build_vuses);
2176 operand_build_sort_virtual (build_v_may_defs);
2177 operand_build_sort_virtual (build_v_must_defs);
2179 finalize_ssa_stmt_operands (stmt);
2183 /* Free any operand vectors in OPS. */
2185 void
2186 free_ssa_operands (stmt_operands_p ops)
2188 ops->def_ops = NULL;
2189 ops->use_ops = NULL;
2190 ops->maydef_ops = NULL;
2191 ops->mustdef_ops = NULL;
2192 ops->vuse_ops = NULL;
2196 /* Get the operands of statement STMT. */
2198 void
2199 update_stmt_operands (tree stmt)
2201 stmt_ann_t ann = get_stmt_ann (stmt);
2203 /* If update_stmt_operands is called before SSA is initialized, do
2204 nothing. */
2205 if (!ssa_operands_active ())
2206 return;
2208 /* The optimizers cannot handle statements that are nothing but a
2209 _DECL. This indicates a bug in the gimplifier. */
2210 gcc_assert (!SSA_VAR_P (stmt));
2212 gcc_assert (ann->modified);
2214 timevar_push (TV_TREE_OPS);
2216 build_ssa_operands (stmt);
2218 /* Clear the modified bit for STMT. */
2219 ann->modified = 0;
2221 timevar_pop (TV_TREE_OPS);
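/* Editorial sketch, not part of the original file: the usual way a
   pass reaches update_stmt_operands is to flag the statement as
   modified and then ask for a rescan.  This assumes
   mark_stmt_modified () from tree-flow-inline.h; the update_stmt ()
   helper in that header performs the same two steps.  */

static void
example_rescan_stmt_operands (tree stmt)
{
  /* Record that the cached operands are stale ...  */
  mark_stmt_modified (stmt);

  /* ... and rebuild the cache now.  Before the SSA operand machinery
     is initialized this is a no-op.  */
  update_stmt_operands (stmt);
}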
2225 /* Copy the virtual operands from SRC to DEST. */
2227 void
2228 copy_virtual_operands (tree dest, tree src)
2230 tree t;
2231 ssa_op_iter iter, old_iter;
2232 use_operand_p use_p, u2;
2233 def_operand_p def_p, d2;
2235 build_ssa_operands (dest);
2237 /* Copy all the virtual fields. */
2238 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
2239 append_vuse (t);
2240 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
2241 append_v_may_def (t);
2242 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
2243 append_v_must_def (t);
2245 if (VEC_length (tree, build_vuses) == 0
2246 && VEC_length (tree, build_v_may_defs) == 0
2247 && VEC_length (tree, build_v_must_defs) == 0)
2248 return;
2250 /* Now commit the virtual operands to this stmt. */
2251 finalize_ssa_v_must_defs (dest);
2252 finalize_ssa_v_may_defs (dest);
2253 finalize_ssa_vuses (dest);
2255 /* Finally, set the fields to the same values as the originals. */
2256 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
2257 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
2259 gcc_assert (!op_iter_done (&old_iter));
2260 SET_USE (use_p, t);
2261 t = op_iter_next_tree (&old_iter);
2263 gcc_assert (op_iter_done (&old_iter));
2265 op_iter_init_maydef (&old_iter, src, &u2, &d2);
2266 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
2268 gcc_assert (!op_iter_done (&old_iter));
2269 SET_USE (use_p, USE_FROM_PTR (u2));
2270 SET_DEF (def_p, DEF_FROM_PTR (d2));
2271 op_iter_next_maymustdef (&u2, &d2, &old_iter);
2273 gcc_assert (op_iter_done (&old_iter));
2275 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
2276 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
2278 gcc_assert (!op_iter_done (&old_iter));
2279 SET_USE (use_p, USE_FROM_PTR (u2));
2280 SET_DEF (def_p, DEF_FROM_PTR (d2));
2281 op_iter_next_maymustdef (&u2, &d2, &old_iter);
2283 gcc_assert (op_iter_done (&old_iter));
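/* Editorial sketch, not part of the original file: a typical use of
   copy_virtual_operands is rewriting a statement into an equivalent
   form while carrying over the virtual operands, which are expensive
   to recompute.  NEW_STMT and OLD_STMT are hypothetical arguments.  */

static void
example_reuse_virtual_operands (tree new_stmt, tree old_stmt)
{
  /* Build NEW_STMT's operand cache and copy OLD_STMT's VUSEs,
     V_MAY_DEFs and V_MUST_DEFs onto it.  */
  copy_virtual_operands (new_stmt, old_stmt);
}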
2288 /* Specifically for use in DOM's expression analysis. Given a store, we
2289 create an artificial statement that looks like a load from the store;
2290 this can be used to eliminate redundant loads. OLD_STMT is the store
2291 statement, and NEW_STMT is the new load, which represents a load of the
2292 values stored. */
2294 void
2295 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
2297 stmt_ann_t ann;
2298 tree op;
2299 ssa_op_iter iter;
2300 use_operand_p use_p;
2301 unsigned x;
2303 ann = get_stmt_ann (new_stmt);
2305 /* Process the stmt looking for operands. */
2306 start_ssa_stmt_operands ();
2307 parse_ssa_operands (new_stmt);
2309 for (x = 0; x < VEC_length (tree, build_vuses); x++)
2311 tree t = VEC_index (tree, build_vuses, x);
2312 if (TREE_CODE (t) != SSA_NAME)
2314 var_ann_t ann = var_ann (t);
2315 ann->in_vuse_list = 0;
2319 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
2321 tree t = VEC_index (tree, build_v_may_defs, x);
2322 if (TREE_CODE (t) != SSA_NAME)
2324 var_ann_t ann = var_ann (t);
2325 ann->in_v_may_def_list = 0;
2329 /* Remove any virtual operands that were found. */
2330 VEC_truncate (tree, build_v_may_defs, 0);
2331 VEC_truncate (tree, build_v_must_defs, 0);
2332 VEC_truncate (tree, build_vuses, 0);
2334 /* For each VDEF on the original statement, we want to create a
2335 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
2336 statement. */
2337 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
2338 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
2339 append_vuse (op);
2341 /* Now build the operands for this new stmt. */
2342 finalize_ssa_stmt_operands (new_stmt);
2344 /* None of the uses in this fake stmt may appear in the immediate use lists. */
2345 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
2346 delink_imm_use (use_p);
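/* Editorial sketch, not part of the original file, loosely following
   the way DOM consumes this routine: for a store LHS = RHS, build the
   mirrored statement RHS = LHS and give it the operands of a load, so
   that later loads of the stored value can be recognized as
   redundant.  build2 () is assumed from tree.h; the exact statement
   DOM builds may differ.  */

static tree
example_build_artificial_load (tree store_stmt)
{
  tree lhs = TREE_OPERAND (store_stmt, 0);
  tree rhs = TREE_OPERAND (store_stmt, 1);
  tree load = build2 (MODIFY_EXPR, TREE_TYPE (rhs), rhs, lhs);

  create_ssa_artficial_load_stmt (load, store_stmt);
  return load;
}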
2350 /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is made
2351 to check the validity of the swap operation. */
2353 void
2354 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
2356 tree op0, op1;
2357 op0 = *exp0;
2358 op1 = *exp1;
2360 /* If the operand cache is active, attempt to preserve the relative
2361 positions of these two operands in their respective immediate use
2362 lists. */
2363 if (ssa_operands_active () && op0 != op1)
2365 use_optype_p use0, use1, ptr;
2366 use0 = use1 = NULL;
2368 /* Find the 2 operands in the cache, if they are there. */
2369 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2370 if (USE_OP_PTR (ptr)->use == exp0)
2372 use0 = ptr;
2373 break;
2376 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2377 if (USE_OP_PTR (ptr)->use == exp1)
2379 use1 = ptr;
2380 break;
2383 /* If one or both uses do not have operand entries, there isn't much
2384 we can do at this point; presumably we don't need to worry about it. */
2385 if (use0 && use1)
2387 tree *tmp = USE_OP_PTR (use1)->use;
2388 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
2389 USE_OP_PTR (use0)->use = tmp;
2393 /* Now swap the data. */
2394 *exp0 = op1;
2395 *exp1 = op0;
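/* Editorial sketch, not part of the original file: a caller typically
   swaps the two operands of a commutative RHS in place; because the
   use entries are fixed up above, the operand cache and the immediate
   use lists stay consistent without a full rescan.
   commutative_tree_code () is assumed from tree.h.  */

static void
example_canonicalize_commutative_rhs (tree stmt)
{
  tree rhs = TREE_OPERAND (stmt, 1);

  if (commutative_tree_code (TREE_CODE (rhs)))
    swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
                        &TREE_OPERAND (rhs, 1));
}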
2399 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
2400 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
2401 a single variable whose address has been taken or any other valid
2402 GIMPLE memory reference (structure reference, array, etc). If the
2403 base address of REF is a decl that has sub-variables, also add all
2404 of its sub-variables. */
2406 void
2407 add_to_addressable_set (tree ref, bitmap *addresses_taken)
2409 tree var;
2410 subvar_t svars;
2412 gcc_assert (addresses_taken);
2414 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
2415 as the only thing we take the address of. If VAR is a structure,
2416 taking the address of a field means that the whole structure may
2417 be referenced using pointer arithmetic. See PR 21407 and the
2418 ensuing mailing list discussion. */
2419 var = get_base_address (ref);
2420 if (var && SSA_VAR_P (var))
2422 if (*addresses_taken == NULL)
2423 *addresses_taken = BITMAP_GGC_ALLOC ();
2425 if (var_can_have_subvars (var)
2426 && (svars = get_subvars_for_var (var)))
2428 subvar_t sv;
2429 for (sv = svars; sv; sv = sv->next)
2431 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
2432 TREE_ADDRESSABLE (sv->var) = 1;
2435 else
2437 bitmap_set_bit (*addresses_taken, DECL_UID (var));
2438 TREE_ADDRESSABLE (var) = 1;
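/* Editorial sketch, not part of the original file: this mirrors the
   call made from the ADDR_EXPR case of get_expr_operands above, where
   the base of the address expression (and any subvariables) is
   recorded in the statement annotation's addresses_taken bitmap.  */

static void
example_note_address_taken (tree addr_expr, stmt_ann_t s_ann)
{
  if (TREE_CODE (addr_expr) == ADDR_EXPR)
    add_to_addressable_set (TREE_OPERAND (addr_expr, 0),
                            &s_ann->addresses_taken);
}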
2444 /* Scan the immediate_use list for VAR, making sure it is linked properly.
2445 If there is a problem, emit an error message to F and return TRUE. */
2447 bool
2448 verify_imm_links (FILE *f, tree var)
2450 use_operand_p ptr, prev, list;
2451 int count;
2453 gcc_assert (TREE_CODE (var) == SSA_NAME);
2455 list = &(SSA_NAME_IMM_USE_NODE (var));
2456 gcc_assert (list->use == NULL);
2458 if (list->prev == NULL)
2460 gcc_assert (list->next == NULL);
2461 return false;
2464 prev = list;
2465 count = 0;
2466 for (ptr = list->next; ptr != list; )
2468 if (prev != ptr->prev)
2469 goto error;
2471 if (ptr->use == NULL)
2472 goto error; /* 2 roots, or SAFE guard node. */
2473 else if (*(ptr->use) != var)
2474 goto error;
2476 prev = ptr;
2477 ptr = ptr->next;
2479 /* Avoid infinite loops. 50,000,000 uses probably indicates a
2480 problem. */
2481 if (count++ > 50000000)
2482 goto error;
2485 /* Verify list in the other direction. */
2486 prev = list;
2487 for (ptr = list->prev; ptr != list; )
2489 if (prev != ptr->next)
2490 goto error;
2491 prev = ptr;
2492 ptr = ptr->prev;
2493 if (count-- < 0)
2494 goto error;
2497 if (count != 0)
2498 goto error;
2500 return false;
2502 error:
2503 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2505 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2506 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2508 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2509 (void *)ptr->use);
2510 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2511 fprintf (f, "\n");
2512 return true;
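/* Editorial sketch, not part of the original file: a whole-function
   checker would run verify_imm_links over every SSA name, much as
   dump_immediate_uses below walks num_ssa_names.  */

static bool
example_verify_all_imm_links (FILE *f)
{
  unsigned int i;
  bool err = false;

  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);

      if (name)
        err |= verify_imm_links (f, name);
    }

  return err;
}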
2516 /* Dump all the immediate uses of VAR to FILE. */
2518 void
2519 dump_immediate_uses_for (FILE *file, tree var)
2521 imm_use_iterator iter;
2522 use_operand_p use_p;
2524 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2526 print_generic_expr (file, var, TDF_SLIM);
2527 fprintf (file, " : -->");
2528 if (has_zero_uses (var))
2529 fprintf (file, " no uses.\n");
2530 else
2531 if (has_single_use (var))
2532 fprintf (file, " single use.\n");
2533 else
2534 fprintf (file, "%d uses.\n", num_imm_uses (var));
2536 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2538 if (use_p->stmt == NULL && use_p->use == NULL)
2539 fprintf (file, "***end of stmt iterator marker***\n");
2540 else
2541 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2542 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2543 else
2544 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2546 fprintf (file, "\n");
2550 /* Dump all the immediate uses to FILE. */
2552 void
2553 dump_immediate_uses (FILE *file)
2555 tree var;
2556 unsigned int x;
2558 fprintf (file, "Immediate_uses: \n\n");
2559 for (x = 1; x < num_ssa_names; x++)
2561 var = ssa_name(x);
2562 if (!var)
2563 continue;
2564 dump_immediate_uses_for (file, var);
2569 /* Dump def-use edges on stderr. */
2571 void
2572 debug_immediate_uses (void)
2574 dump_immediate_uses (stderr);
2578 /* Dump def-use edges for VAR on stderr. */
2580 void
2581 debug_immediate_uses_for (tree var)
2583 dump_immediate_uses_for (stderr, var);
2586 #include "gt-tree-ssa-operands.h"