[official-gcc.git] / gcc / tree-ssa-operands.c
1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
53 The operand tree is then parsed by the various get_* routines which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 4 of these routines, each representing one of the
57 4 types of operands: Defs, Uses, Virtual Uses, and Virtual Defs (VDEFs).
59 The append_* routines check for duplication, and simply keep a list of
60 unique objects for each operand type in the build_* extendable vectors.
62 Once the stmt tree is completely parsed, the finalize_ssa_operands()
63 routine is called, which proceeds to perform the finalization routine
64 on each of the 4 operand vectors which have been built up.
66 If the stmt had a previous operand cache, the finalization routines
67 attempt to match up the new operands with the old ones. If it's a perfect
68 match, the old vector is simply reused. If it isn't a perfect match, then
69 a new vector is created and the new operands are placed there. For
70 virtual operands, if the previous cache had an SSA_NAME version of a
71 variable, and that same variable occurs in the new operands cache, then
72 the new cache vector will also get the same SSA_NAME.
74 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
75 vector for VUSE, then the new vector will also be modified such that
76 it contains 'a_5' rather than 'a'. */
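/* As a concrete illustration (a sketch, not actual dump output): for a
   statement such as

     *p_1 = a_2 + b_3;

   the real operands are the USEs of 'a_2' and 'b_3', stored as pointers
   into the statement tree itself.  The store through 'p_1' appears only
   in the operand cache, as a VDEF of the symbol or memory tag that
   'p_1' may point to, e.g.

     # SMT.4_6 = VDEF <SMT.4_5>
     *p_1 = a_2 + b_3;  */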
79 /* Structure storing statistics on how many call clobbers we have, and
80 how many were avoided. */
82 static struct
84 /* Number of call-clobbered ops we attempt to add to calls in
85 add_call_clobbered_mem_symbols. */
86 unsigned int clobbered_vars;
88 /* Number of write-clobbers (VDEFs) avoided by using
89 not_written information. */
90 unsigned int static_write_clobbers_avoided;
92 /* Number of reads (VUSEs) avoided by using not_read information. */
93 unsigned int static_read_clobbers_avoided;
95 /* Number of write-clobbers avoided because the variable can't escape to
96 this call. */
97 unsigned int unescapable_clobbers_avoided;
99 /* Number of read-only uses we attempt to add to calls in
100 add_call_read_mem_symbols. */
101 unsigned int readonly_clobbers;
103 /* Number of read-only uses we avoid using not_read information. */
104 unsigned int static_readonly_clobbers_avoided;
105 } clobber_stats;
108 /* Flags to describe operand properties in helpers. */
110 /* By default, operands are loaded. */
111 #define opf_use 0
113 /* Operand is the target of an assignment expression or a
114 call-clobbered variable. */
115 #define opf_def (1 << 0)
117 /* No virtual operands should be created in the expression. This is used
118 when traversing ADDR_EXPR nodes which have different semantics than
119 other expressions. Inside an ADDR_EXPR node, the only operands that we
120 need to consider are indices into arrays. For instance, &a.b[i] should
121 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
122 VUSE for 'b'. */
123 #define opf_no_vops (1 << 1)
125 /* Operand is an implicit reference. This is used to distinguish
126 explicit assignments in the form of GIMPLE_MODIFY_STMT from
127 clobbering sites like function calls or ASM_EXPRs. */
128 #define opf_implicit (1 << 2)
130 /* Array for building all the def operands. */
131 static VEC(tree,heap) *build_defs;
133 /* Array for building all the use operands. */
134 static VEC(tree,heap) *build_uses;
136 /* Set for building all the VDEF operands. */
137 static VEC(tree,heap) *build_vdefs;
139 /* Set for building all the VUSE operands. */
140 static VEC(tree,heap) *build_vuses;
142 /* Set for building all the loaded symbols. */
143 static bitmap build_loads;
145 /* Set for building all the stored symbols. */
146 static bitmap build_stores;
148 static void get_expr_operands (tree, tree *, int);
150 /* Number of functions with initialized ssa_operands. */
151 static int n_initialized = 0;
153 /* Statement change buffer. Data structure used to record state
154 information for statements. This is used to determine what needs
155 to be done in order to update the SSA web after a statement is
156 modified by a pass. If STMT is a statement that has just been
157 created, needs to be folded via fold_stmt, or has its physical
158 structure changed in any other way, then the pass should:
160 1- Call push_stmt_changes (&stmt) to record the current state of
161 STMT before any modifications are made.
163 2- Make all appropriate modifications to the statement.
165 3- Call pop_stmt_changes (&stmt) to find new symbols that
166 need to be put in SSA form, SSA name mappings for names that
167 have disappeared, recompute invariantness for address
168 expressions, cleanup EH information, etc.
170 If it is possible to determine that the statement was not modified,
171 instead of calling pop_stmt_changes it is quicker to call
172 discard_stmt_changes to avoid the expensive and unnecessary operand
173 re-scan and change comparison. */
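/* A minimal usage sketch (the surrounding pass logic, including the
   'changed' flag, is hypothetical):

     push_stmt_changes (&stmt);
     changed = fold_stmt (&stmt);
     if (changed)
       pop_stmt_changes (&stmt);
     else
       discard_stmt_changes (&stmt);

   push_stmt_changes records the operands and the load/store sets of
   STMT; pop_stmt_changes re-scans the operands and updates the SSA web
   with whatever differs from the recorded state.  */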
175 struct scb_d
177 /* Pointer to the statement being modified. */
178 tree *stmt_p;
180 /* If the statement references memory these are the sets of symbols
181 loaded and stored by the statement. */
182 bitmap loads;
183 bitmap stores;
186 typedef struct scb_d *scb_t;
187 DEF_VEC_P(scb_t);
188 DEF_VEC_ALLOC_P(scb_t,heap);
190 /* Stack of statement change buffers (SCB). Every call to
191 push_stmt_changes pushes a new buffer onto the stack. Calls to
192 pop_stmt_changes pop a buffer off of the stack and compute the set
193 of changes for the popped statement. */
194 static VEC(scb_t,heap) *scb_stack;
196 /* Return the DECL_UID of the base variable of T. */
198 static inline unsigned
199 get_name_decl (tree t)
201 if (TREE_CODE (t) != SSA_NAME)
202 return DECL_UID (t);
203 else
204 return DECL_UID (SSA_NAME_VAR (t));
208 /* Comparison function for qsort used in operand_build_sort_virtual. */
210 static int
211 operand_build_cmp (const void *p, const void *q)
213 tree e1 = *((const tree *)p);
214 tree e2 = *((const tree *)q);
215 unsigned int u1,u2;
217 u1 = get_name_decl (e1);
218 u2 = get_name_decl (e2);
220 /* We want to sort in ascending order. They can never be equal. */
221 #ifdef ENABLE_CHECKING
222 gcc_assert (u1 != u2);
223 #endif
224 return (u1 > u2 ? 1 : -1);
228 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
230 static inline void
231 operand_build_sort_virtual (VEC(tree,heap) *list)
233 int num = VEC_length (tree, list);
235 if (num < 2)
236 return;
238 if (num == 2)
240 if (get_name_decl (VEC_index (tree, list, 0))
241 > get_name_decl (VEC_index (tree, list, 1)))
243 /* Swap elements if in the wrong order. */
244 tree tmp = VEC_index (tree, list, 0);
245 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
246 VEC_replace (tree, list, 1, tmp);
248 return;
251 /* There are 3 or more elements, call qsort. */
252 qsort (VEC_address (tree, list),
253 VEC_length (tree, list),
254 sizeof (tree),
255 operand_build_cmp);
259 /* Return true if the SSA operands cache is active. */
261 bool
262 ssa_operands_active (void)
264 return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
268 /* Initialize the operand cache routines. */
270 void
271 init_ssa_operands (void)
273 if (!n_initialized++)
275 build_defs = VEC_alloc (tree, heap, 5);
276 build_uses = VEC_alloc (tree, heap, 10);
277 build_vuses = VEC_alloc (tree, heap, 25);
278 build_vdefs = VEC_alloc (tree, heap, 25);
279 build_loads = BITMAP_ALLOC (NULL);
280 build_stores = BITMAP_ALLOC (NULL);
281 scb_stack = VEC_alloc (scb_t, heap, 20);
284 gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
285 gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
286 gimple_ssa_operands (cfun)->operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
287 gimple_ssa_operands (cfun)->ops_active = true;
288 memset (&clobber_stats, 0, sizeof (clobber_stats));
292 /* Dispose of anything required by the operand routines. */
294 void
295 fini_ssa_operands (void)
297 struct ssa_operand_memory_d *ptr;
298 unsigned ix;
299 tree mpt;
301 if (!--n_initialized)
303 VEC_free (tree, heap, build_defs);
304 VEC_free (tree, heap, build_uses);
305 VEC_free (tree, heap, build_vdefs);
306 VEC_free (tree, heap, build_vuses);
307 BITMAP_FREE (build_loads);
308 BITMAP_FREE (build_stores);
310 /* The change buffer stack had better be empty. */
311 gcc_assert (VEC_length (scb_t, scb_stack) == 0);
312 VEC_free (scb_t, heap, scb_stack);
313 scb_stack = NULL;
316 gimple_ssa_operands (cfun)->free_defs = NULL;
317 gimple_ssa_operands (cfun)->free_uses = NULL;
318 gimple_ssa_operands (cfun)->free_vuses = NULL;
319 gimple_ssa_operands (cfun)->free_vdefs = NULL;
321 while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
323 gimple_ssa_operands (cfun)->operand_memory
324 = gimple_ssa_operands (cfun)->operand_memory->next;
325 ggc_free (ptr);
328 for (ix = 0;
329 VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt);
330 ix++)
332 if (mpt)
333 BITMAP_FREE (MPT_SYMBOLS (mpt));
336 VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table);
338 gimple_ssa_operands (cfun)->ops_active = false;
340 if (dump_file && (dump_flags & TDF_STATS))
342 fprintf (dump_file, "Original clobbered vars: %d\n",
343 clobber_stats.clobbered_vars);
344 fprintf (dump_file, "Static write clobbers avoided: %d\n",
345 clobber_stats.static_write_clobbers_avoided);
346 fprintf (dump_file, "Static read clobbers avoided: %d\n",
347 clobber_stats.static_read_clobbers_avoided);
348 fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
349 clobber_stats.unescapable_clobbers_avoided);
350 fprintf (dump_file, "Original read-only clobbers: %d\n",
351 clobber_stats.readonly_clobbers);
352 fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
353 clobber_stats.static_readonly_clobbers_avoided);
358 /* Return SIZE bytes of memory from the operand memory pool. */
360 static inline void *
361 ssa_operand_alloc (unsigned size)
363 char *ptr;
365 gcc_assert (size <= SSA_OPERAND_MEMORY_SIZE);
367 if (gimple_ssa_operands (cfun)->operand_memory_index + size
368 >= SSA_OPERAND_MEMORY_SIZE)
370 struct ssa_operand_memory_d *ptr;
371 ptr = GGC_NEW (struct ssa_operand_memory_d);
372 ptr->next = gimple_ssa_operands (cfun)->operand_memory;
373 gimple_ssa_operands (cfun)->operand_memory = ptr;
374 gimple_ssa_operands (cfun)->operand_memory_index = 0;
376 ptr = &(gimple_ssa_operands (cfun)->operand_memory
377 ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
378 gimple_ssa_operands (cfun)->operand_memory_index += size;
379 return ptr;
383 static inline struct def_optype_d *
384 alloc_def (void)
386 struct def_optype_d *ret;
387 if (gimple_ssa_operands (cfun)->free_defs)
389 ret = gimple_ssa_operands (cfun)->free_defs;
390 gimple_ssa_operands (cfun)->free_defs
391 = gimple_ssa_operands (cfun)->free_defs->next;
393 else
394 ret = (struct def_optype_d *)
395 ssa_operand_alloc (sizeof (struct def_optype_d));
396 return ret;
400 static inline struct use_optype_d *
401 alloc_use (void)
403 struct use_optype_d *ret;
404 if (gimple_ssa_operands (cfun)->free_uses)
406 ret = gimple_ssa_operands (cfun)->free_uses;
407 gimple_ssa_operands (cfun)->free_uses
408 = gimple_ssa_operands (cfun)->free_uses->next;
410 else
411 ret = (struct use_optype_d *)ssa_operand_alloc (sizeof (struct use_optype_d));
412 return ret;
418 static inline struct vdef_optype_d *
419 alloc_vdef (int num)
421 struct vdef_optype_d *ret;
422 /* Eliminate free list for the moment. */
423 #if 0
424 if (free_vdefs)
426 ret = free_vdefs;
427 free_vdefs = free_vdefs->next;
429 else
430 #endif
431 ret = (struct vdef_optype_d *)ssa_operand_alloc (
432 sizeof (struct vdef_optype_d) + (num - 1) * sizeof (vuse_element_t));
433 VUSE_VECT_NUM_ELEM (ret->usev) = num;
434 return ret;
440 static inline struct vuse_optype_d *
441 alloc_vuse (int num)
443 struct vuse_optype_d *ret;
444 /* No free list for the moment. */
445 #if 0
446 if (free_vuses)
448 ret = free_vuses;
449 free_vuses = free_vuses->next;
451 else
452 #endif
453 ret = (struct vuse_optype_d *)ssa_operand_alloc (
454 sizeof (struct vuse_optype_d) + (num - 1) * sizeof (vuse_element_t));
455 VUSE_VECT_NUM_ELEM (ret->usev) = num;
456 return ret;
460 /* This routine makes sure that PTR is in an immediate use list, and makes
461 sure the stmt pointer is set to the current stmt. */
463 static inline void
464 set_virtual_use_link (use_operand_p ptr, tree stmt)
466 /* fold_stmt may have changed the stmt pointers. */
467 if (ptr->stmt != stmt)
468 ptr->stmt = stmt;
470 /* If this use isn't in a list, add it to the correct list. */
471 if (!ptr->prev)
472 link_imm_use (ptr, *(ptr->use));
475 /* Appends ELT after TO, and moves the TO pointer to ELT. */
477 #define APPEND_OP_AFTER(ELT, TO) \
478 do \
480 (TO)->next = (ELT); \
481 (TO) = (ELT); \
482 } while (0)
484 /* Appends head of list FROM after TO, and moves both pointers
485 to their successors. */
487 #define MOVE_HEAD_AFTER(FROM, TO) \
488 do \
490 APPEND_OP_AFTER (FROM, TO); \
491 (FROM) = (FROM)->next; \
492 } while (0)
494 /* Moves OP to the appropriate freelist. OP is set to its successor. */
496 #define MOVE_HEAD_TO_FREELIST(OP, TYPE) \
497 do \
499 TYPE##_optype_p next = (OP)->next; \
500 (OP)->next \
501 = gimple_ssa_operands (cfun)->free_##TYPE##s; \
502 gimple_ssa_operands (cfun)->free_##TYPE##s = (OP);\
503 (OP) = next; \
504 } while (0)
506 /* Initializes immediate use at USE_PTR to value VAL, and links it to the list
507 of immediate uses. STMT is the current statement. */
509 #define INITIALIZE_USE(USE_PTR, VAL, STMT) \
510 do \
512 (USE_PTR)->use = (VAL); \
513 link_imm_use_stmt ((USE_PTR), *(VAL), (STMT)); \
514 } while (0)
516 /* Adds OP to the list of defs after LAST, and moves
517 LAST to the new element. */
519 static inline def_optype_p
520 add_def_op (tree *op, def_optype_p *last)
522 def_optype_p new;
524 new = alloc_def ();
525 DEF_OP_PTR (new) = op;
526 APPEND_OP_AFTER (new, *last);
527 return new;
530 /* Adds OP to the list of uses of statement STMT after LAST, and moves
531 LAST to the new element. */
533 static inline use_optype_p
534 add_use_op (tree stmt, tree *op, use_optype_p *last)
536 use_optype_p new;
538 new = alloc_use ();
539 INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
540 APPEND_OP_AFTER (new, *last);
541 return new;
544 /* Adds OP to the list of vuses of statement STMT after LAST, and moves
545 LAST to the new element. */
547 static inline vuse_optype_p
548 add_vuse_op (tree stmt, tree op, int num, vuse_optype_p *last)
550 vuse_optype_p new;
551 int x;
553 new = alloc_vuse (num);
554 for (x = 0; x < num; x++)
556 SET_VUSE_OP (new, x, op);
557 INITIALIZE_USE (VUSE_OP_PTR (new, x), &new->usev.uses[x].use_var, stmt);
560 APPEND_OP_AFTER (new, *last);
561 return new;
565 /* Adds OP to the list of vdefs of statement STMT after LAST, and moves
566 LAST to the new element. */
568 static inline vdef_optype_p
569 add_vdef_op (tree stmt, tree op, int num, vdef_optype_p *last)
571 int x;
572 vdef_optype_p new;
574 new = alloc_vdef (num);
575 VDEF_RESULT (new) = op;
576 for (x = 0; x < num; x++)
578 SET_VDEF_OP (new, x, op);
579 INITIALIZE_USE (VDEF_OP_PTR (new, x), &new->usev.uses[x].use_var, stmt);
582 APPEND_OP_AFTER (new, *last);
583 return new;
587 struct vdef_optype_d *
588 realloc_vdef (struct vdef_optype_d *ptr, int num_elem)
590 int x, lim;
591 tree val, stmt;
592 struct vdef_optype_d *ret, *tmp;
594 if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
595 return ptr;
597 val = VDEF_RESULT (ptr);
598 if (TREE_CODE (val) == SSA_NAME)
599 val = SSA_NAME_VAR (val);
601 stmt = USE_STMT (VDEF_OP_PTR (ptr, 0));
603 /* Delink all the existing uses. */
604 for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
606 use_operand_p use_p = VDEF_OP_PTR (ptr, x);
607 delink_imm_use (use_p);
610 /* If we want less space, simply use this one, and shrink the size. */
611 if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem)
613 VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem;
614 return ptr;
617 /* It is growing. Allocate a new one and replace the old one. */
618 tmp = ptr;
619 ret = add_vdef_op (stmt, val, num_elem, &ptr);
620 ret->next = NULL;
621 ptr = tmp;
623 lim = VUSE_VECT_NUM_ELEM (ptr->usev);
624 memset (ptr, 0,
625 sizeof (struct vdef_optype_d) + sizeof (vuse_element_t) * (lim- 1));
627 /* Now simply remove the old one. */
628 if (VDEF_OPS (stmt) == ptr)
630 VDEF_OPS (stmt) = ret;
631 return ret;
633 else
634 for (tmp = VDEF_OPS (stmt);
635 tmp != NULL && tmp->next != ptr;
636 tmp = tmp->next)
638 tmp->next = ret;
639 return ret;
642 /* The pointer passed in isn't in STMT's VDEF lists. */
643 gcc_unreachable ();
647 struct vuse_optype_d *
648 realloc_vuse (struct vuse_optype_d *ptr, int num_elem)
650 int x, lim;
651 tree val, stmt;
652 struct vuse_optype_d *ret, *tmp;
654 if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
655 return ptr;
657 val = VUSE_OP (ptr, 0);
658 if (TREE_CODE (val) == SSA_NAME)
659 val = SSA_NAME_VAR (val);
661 stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
663 /* Delink all the existing uses. */
664 for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
666 use_operand_p use_p = VUSE_OP_PTR (ptr, x);
667 delink_imm_use (use_p);
670 /* If we want less space, simply use this one, and shrink the size. */
671 if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem)
673 VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem;
674 return ptr;
677 /* It is growing. Allocate a new one and replace the old one. */
678 tmp = ptr;
679 ret = add_vuse_op (stmt, val, num_elem, &ptr);
680 ret->next = NULL;
681 ptr = tmp;
683 lim = VUSE_VECT_NUM_ELEM (ptr->usev);
684 memset (ptr, 0,
685 sizeof (struct vuse_optype_d) + sizeof (vuse_element_t) * (lim - 1));
687 /* Now simply link it in, find the node which points to this one. */
688 if (VUSE_OPS (stmt) == ptr)
690 VUSE_OPS (stmt) = ret;
691 return ret;
693 else
694 for (tmp = VUSE_OPS (stmt);
695 tmp != NULL && tmp->next != ptr;
696 tmp = tmp->next)
698 tmp->next = ret;
699 return ret;
702 /* The pointer passed in isn't in STMT's VUSE lists. */
703 gcc_unreachable ();
706 /* Takes elements from build_defs and turns them into def operands of STMT.
707 TODO -- Given that the def operands list is not necessarily sorted, merging
708 the operands this way does not make much sense.
709 -- Make build_defs VEC of tree *. */
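/* As a rough illustration with hypothetical operand pointers: if the
   old DEF list holds {0x10, 0x30} and build_defs holds {0x10, 0x20,
   0x30}, the merge below reuses the node for 0x10, allocates a new
   node for 0x20, and then reuses the node for 0x30.  */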
711 static inline void
712 finalize_ssa_def_ops (tree stmt)
714 unsigned new_i;
715 struct def_optype_d new_list;
716 def_optype_p old_ops, last;
717 tree *old_base;
719 new_list.next = NULL;
720 last = &new_list;
722 old_ops = DEF_OPS (stmt);
724 new_i = 0;
725 while (old_ops && new_i < VEC_length (tree, build_defs))
727 tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
728 old_base = DEF_OP_PTR (old_ops);
730 if (old_base == new_base)
732 /* if variables are the same, reuse this node. */
733 MOVE_HEAD_AFTER (old_ops, last);
734 new_i++;
736 else if (old_base < new_base)
738 /* if old is less than new, old goes to the free list. */
739 MOVE_HEAD_TO_FREELIST (old_ops, def);
741 else
743 /* This is a new operand. */
744 add_def_op (new_base, &last);
745 new_i++;
749 /* If there is anything remaining in the build_defs list, simply emit it. */
750 for ( ; new_i < VEC_length (tree, build_defs); new_i++)
751 add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);
753 last->next = NULL;
755 /* If there is anything in the old list, free it. */
756 if (old_ops)
758 old_ops->next = gimple_ssa_operands (cfun)->free_defs;
759 gimple_ssa_operands (cfun)->free_defs = old_ops;
762 /* Now set the stmt's operands. */
763 DEF_OPS (stmt) = new_list.next;
765 #ifdef ENABLE_CHECKING
767 def_optype_p ptr;
768 unsigned x = 0;
769 for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
770 x++;
772 gcc_assert (x == VEC_length (tree, build_defs));
774 #endif
777 /* This routine will create stmt operands for STMT from the def build list. */
779 static void
780 finalize_ssa_defs (tree stmt)
782 unsigned int num = VEC_length (tree, build_defs);
784 /* There should only be a single real definition per assignment. */
785 gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);
787 /* If there is an old list, often the new list is identical, or close, so
788 find the elements at the beginning that are the same as the vector. */
789 finalize_ssa_def_ops (stmt);
792 /* Takes elements from build_uses and turns them into use operands of STMT.
793 TODO -- Make build_uses VEC of tree *. */
795 static inline void
796 finalize_ssa_use_ops (tree stmt)
798 unsigned new_i;
799 struct use_optype_d new_list;
800 use_optype_p old_ops, ptr, last;
802 new_list.next = NULL;
803 last = &new_list;
805 old_ops = USE_OPS (stmt);
807 /* If there is anything in the old list, free it. */
808 if (old_ops)
810 for (ptr = old_ops; ptr; ptr = ptr->next)
811 delink_imm_use (USE_OP_PTR (ptr));
812 old_ops->next = gimple_ssa_operands (cfun)->free_uses;
813 gimple_ssa_operands (cfun)->free_uses = old_ops;
816 /* Now create nodes for all the new uses. */
817 for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
818 add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);
820 last->next = NULL;
822 /* Now set the stmt's operands. */
823 USE_OPS (stmt) = new_list.next;
825 #ifdef ENABLE_CHECKING
827 unsigned x = 0;
828 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
829 x++;
831 gcc_assert (x == VEC_length (tree, build_uses));
833 #endif
836 /* Finalize the use operands for STMT. */
838 static void
839 finalize_ssa_uses (tree stmt)
841 #ifdef ENABLE_CHECKING
843 unsigned x;
844 unsigned num = VEC_length (tree, build_uses);
846 /* If the pointer to the operand is the statement itself, something is
847 wrong. It means that we are pointing to a local variable (the
848 initial call to update_stmt_operands does not pass a pointer to a
849 statement). */
850 for (x = 0; x < num; x++)
851 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
853 #endif
854 finalize_ssa_use_ops (stmt);
858 /* Takes elements from BUILD_VDEFS and turns them into vdef operands of
859 STMT. FIXME, for now VDEF operators should have a single operand
860 in their RHS. */
862 static inline void
863 finalize_ssa_vdef_ops (tree stmt)
865 unsigned new_i;
866 struct vdef_optype_d new_list;
867 vdef_optype_p old_ops, ptr, last;
868 stmt_ann_t ann = stmt_ann (stmt);
870 /* Set the symbols referenced by STMT. */
871 if (!bitmap_empty_p (build_stores))
873 if (ann->operands.stores == NULL)
874 ann->operands.stores = BITMAP_ALLOC (NULL);
876 bitmap_copy (ann->operands.stores, build_stores);
878 else
879 BITMAP_FREE (ann->operands.stores);
881 /* If aliases have not been computed, do not instantiate a virtual
882 operator on STMT. Initially, we only compute the SSA form on
883 GIMPLE registers. The virtual SSA form is only computed after
884 alias analysis, so virtual operators will remain unrenamed and
885 the verifier will complain. However, alias analysis needs to
886 access symbol load/store information, so we need to compute
887 those. */
888 if (!gimple_aliases_computed_p (cfun))
889 return;
891 new_list.next = NULL;
892 last = &new_list;
894 old_ops = VDEF_OPS (stmt);
895 new_i = 0;
896 while (old_ops && new_i < VEC_length (tree, build_vdefs))
898 tree op = VEC_index (tree, build_vdefs, new_i);
899 unsigned new_uid = get_name_decl (op);
900 unsigned old_uid = get_name_decl (VDEF_RESULT (old_ops));
902 /* FIXME, for now each VDEF operator should have at most one
903 operand in its RHS. */
904 gcc_assert (VDEF_NUM (old_ops) == 1);
906 if (old_uid == new_uid)
908 /* If the symbols are the same, reuse the existing operand. */
909 MOVE_HEAD_AFTER (old_ops, last);
910 set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt);
911 new_i++;
913 else if (old_uid < new_uid)
915 /* If old is less than new, old goes to the free list. */
916 delink_imm_use (VDEF_OP_PTR (old_ops, 0));
917 MOVE_HEAD_TO_FREELIST (old_ops, vdef);
919 else
921 /* This is a new operand. */
922 add_vdef_op (stmt, op, 1, &last);
923 new_i++;
927 /* If there is anything remaining in BUILD_VDEFS, simply emit it. */
928 for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
929 add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, &last);
931 last->next = NULL;
933 /* If there is anything in the old list, free it. */
934 if (old_ops)
936 for (ptr = old_ops; ptr; ptr = ptr->next)
937 delink_imm_use (VDEF_OP_PTR (ptr, 0));
938 old_ops->next = gimple_ssa_operands (cfun)->free_vdefs;
939 gimple_ssa_operands (cfun)->free_vdefs = old_ops;
942 /* Now set STMT's operands. */
943 VDEF_OPS (stmt) = new_list.next;
945 #ifdef ENABLE_CHECKING
947 unsigned x = 0;
948 for (ptr = VDEF_OPS (stmt); ptr; ptr = ptr->next)
949 x++;
951 gcc_assert (x == VEC_length (tree, build_vdefs));
953 #endif
957 static void
958 finalize_ssa_vdefs (tree stmt)
960 finalize_ssa_vdef_ops (stmt);
965 /* Takes elements from BUILD_VUSES and turns them into VUSE operands of
966 STMT. */
968 static inline void
969 finalize_ssa_vuse_ops (tree stmt)
971 unsigned new_i;
972 int old_i;
973 struct vuse_optype_d new_list;
974 vuse_optype_p old_ops, last;
975 VEC(tree,heap) *new_ops;
976 stmt_ann_t ann;
978 /* Set the symbols referenced by STMT. */
979 ann = stmt_ann (stmt);
980 if (!bitmap_empty_p (build_loads))
982 if (ann->operands.loads == NULL)
983 ann->operands.loads = BITMAP_ALLOC (NULL);
985 bitmap_copy (ann->operands.loads, build_loads);
987 else
988 BITMAP_FREE (ann->operands.loads);
990 /* If aliases have not been computed, do not instantiate a virtual
991 operator on STMT. Initially, we only compute the SSA form on
992 GIMPLE registers. The virtual SSA form is only computed after
993 alias analysis, so virtual operators will remain unrenamed and
994 the verifier will complain. However, alias analysis needs to
995 access symbol load/store information, so we need to compute
996 those. */
997 if (!gimple_aliases_computed_p (cfun))
998 return;
1000 /* STMT should have at most one VUSE operator. */
1001 old_ops = VUSE_OPS (stmt);
1002 gcc_assert (old_ops == NULL || old_ops->next == NULL);
1004 new_ops = NULL;
1005 new_i = old_i = 0;
1006 while (old_ops
1007 && old_i < VUSE_NUM (old_ops)
1008 && new_i < VEC_length (tree, build_vuses))
1010 tree new_op = VEC_index (tree, build_vuses, new_i);
1011 tree old_op = VUSE_OP (old_ops, old_i);
1012 unsigned new_uid = get_name_decl (new_op);
1013 unsigned old_uid = get_name_decl (old_op);
1015 if (old_uid == new_uid)
1017 /* If the symbols are the same, reuse the existing operand. */
1018 VEC_safe_push (tree, heap, new_ops, old_op);
1019 new_i++;
1020 old_i++;
1022 else if (old_uid < new_uid)
1024 /* If OLD_UID is less than NEW_UID, the old operand has
1025 disappeared, skip to the next old operand. */
1026 old_i++;
1028 else
1030 /* This is a new operand. */
1031 VEC_safe_push (tree, heap, new_ops, new_op);
1032 new_i++;
1036 /* If there is anything remaining in the build_vuses list, simply emit it. */
1037 for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
1038 VEC_safe_push (tree, heap, new_ops, VEC_index (tree, build_vuses, new_i));
1040 /* If there is anything in the old list, free it. */
1041 if (old_ops)
1043 for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
1044 delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
1045 old_ops->next = gimple_ssa_operands (cfun)->free_vuses;
1046 gimple_ssa_operands (cfun)->free_vuses = old_ops;
1048 VUSE_OPS (stmt) = NULL;
1051 /* If there are any operands, instantiate a VUSE operator for STMT. */
1052 if (new_ops)
1054 tree op;
1055 unsigned i;
1057 new_list.next = NULL;
1058 last = &new_list;
1059 add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), &last);
1060 last->next = NULL;
1062 for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
1063 SET_USE (VUSE_OP_PTR (last, (int) i), op);
1065 VUSE_OPS (stmt) = new_list.next;
1068 #ifdef ENABLE_CHECKING
1070 unsigned x;
1072 if (VUSE_OPS (stmt))
1074 gcc_assert (VUSE_OPS (stmt)->next == NULL);
1075 x = VUSE_NUM (VUSE_OPS (stmt));
1077 else
1078 x = 0;
1080 gcc_assert (x == VEC_length (tree, build_vuses));
1082 #endif
1085 /* Return a new VUSE operand vector for STMT. */
1087 static void
1088 finalize_ssa_vuses (tree stmt)
1090 unsigned num, num_vdefs;
1091 unsigned vuse_index;
1093 /* Remove superfluous VUSE operands. If the statement already has a
1094 VDEF operator for a variable 'a', then a VUSE for 'a' is not
1095 needed because VDEFs imply a VUSE of the variable. For instance,
1096 suppose that variable 'a' is pointed-to by p and q:
1098 # VUSE <a_2>
1099 # a_3 = VDEF <a_2>
1100 *p = *q;
1102 The VUSE <a_2> is superfluous because it is implied by the
1103 VDEF operator. */
1104 num = VEC_length (tree, build_vuses);
1105 num_vdefs = VEC_length (tree, build_vdefs);
1107 if (num > 0 && num_vdefs > 0)
1108 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
1110 tree vuse;
1111 vuse = VEC_index (tree, build_vuses, vuse_index);
1112 if (TREE_CODE (vuse) != SSA_NAME)
1114 var_ann_t ann = var_ann (vuse);
1115 ann->in_vuse_list = 0;
1116 if (ann->in_vdef_list)
1118 VEC_ordered_remove (tree, build_vuses, vuse_index);
1119 continue;
1122 vuse_index++;
1125 finalize_ssa_vuse_ops (stmt);
1129 /* Clear the in_list bits and empty all the build arrays and
1130 bitmaps. */
1132 static inline void
1133 cleanup_build_arrays (void)
1135 unsigned i;
1136 tree t;
1138 for (i = 0; VEC_iterate (tree, build_vdefs, i, t); i++)
1139 if (TREE_CODE (t) != SSA_NAME)
1140 var_ann (t)->in_vdef_list = false;
1142 for (i = 0; VEC_iterate (tree, build_vuses, i, t); i++)
1143 if (TREE_CODE (t) != SSA_NAME)
1144 var_ann (t)->in_vuse_list = false;
1146 VEC_truncate (tree, build_vdefs, 0);
1147 VEC_truncate (tree, build_vuses, 0);
1148 VEC_truncate (tree, build_defs, 0);
1149 VEC_truncate (tree, build_uses, 0);
1150 bitmap_clear (build_loads);
1151 bitmap_clear (build_stores);
1155 /* Finalize all the build vectors and attach the results to STMT. */
1157 static inline void
1158 finalize_ssa_stmt_operands (tree stmt)
1160 finalize_ssa_defs (stmt);
1161 finalize_ssa_uses (stmt);
1162 finalize_ssa_vdefs (stmt);
1163 finalize_ssa_vuses (stmt);
1164 cleanup_build_arrays ();
1168 /* Start the process of building up operand vectors. */
1170 static inline void
1171 start_ssa_stmt_operands (void)
1173 gcc_assert (VEC_length (tree, build_defs) == 0);
1174 gcc_assert (VEC_length (tree, build_uses) == 0);
1175 gcc_assert (VEC_length (tree, build_vuses) == 0);
1176 gcc_assert (VEC_length (tree, build_vdefs) == 0);
1177 gcc_assert (bitmap_empty_p (build_loads));
1178 gcc_assert (bitmap_empty_p (build_stores));
1182 /* Add DEF_P to the list of pointers to operands. */
1184 static inline void
1185 append_def (tree *def_p)
1187 VEC_safe_push (tree, heap, build_defs, (tree) def_p);
1191 /* Add USE_P to the list of pointers to operands. */
1193 static inline void
1194 append_use (tree *use_p)
1196 VEC_safe_push (tree, heap, build_uses, (tree) use_p);
1200 /* Add VAR to the set of variables that require a VDEF operator. */
1202 static inline void
1203 append_vdef (tree var)
1205 tree sym;
1207 if (TREE_CODE (var) != SSA_NAME)
1209 tree mpt;
1210 var_ann_t ann;
1212 /* If VAR belongs to a memory partition, use it instead of VAR. */
1213 mpt = memory_partition (var);
1214 if (mpt)
1215 var = mpt;
1217 /* Don't allow duplicate entries. */
1218 ann = get_var_ann (var);
1219 if (ann->in_vdef_list)
1220 return;
1222 ann->in_vdef_list = true;
1223 sym = var;
1225 else
1226 sym = SSA_NAME_VAR (var);
1228 VEC_safe_push (tree, heap, build_vdefs, var);
1229 bitmap_set_bit (build_stores, DECL_UID (sym));
1233 /* Add VAR to the set of variables that require a VUSE operator. */
1235 static inline void
1236 append_vuse (tree var)
1238 tree sym;
1240 if (TREE_CODE (var) != SSA_NAME)
1242 tree mpt;
1243 var_ann_t ann;
1245 /* If VAR belongs to a memory partition, use it instead of VAR. */
1246 mpt = memory_partition (var);
1247 if (mpt)
1248 var = mpt;
1250 /* Don't allow duplicate entries. */
1251 ann = get_var_ann (var);
1252 if (ann->in_vuse_list || ann->in_vdef_list)
1253 return;
1255 ann->in_vuse_list = true;
1256 sym = var;
1258 else
1259 sym = SSA_NAME_VAR (var);
1261 VEC_safe_push (tree, heap, build_vuses, var);
1262 bitmap_set_bit (build_loads, DECL_UID (sym));
1266 /* REF is a tree that contains the entire pointer dereference
1267 expression, if available, or NULL otherwise. ALIAS is the variable
1268 we are asking if REF can access. OFFSET and SIZE come from the
1269 memory access expression that generated this virtual operand. */
1271 static bool
1272 access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
1273 HOST_WIDE_INT size)
1275 bool offsetgtz = offset > 0;
1276 unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
1277 tree base = ref ? get_base_address (ref) : NULL;
1279 /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
1280 using a call-clobbered memory tag. By definition, call-clobbered
1281 memory tags can always touch .GLOBAL_VAR. */
1282 if (alias == gimple_global_var (cfun))
1283 return true;
1285 /* If ALIAS is an SFT, it can't be touched if the offset
1286 and size of the access is not overlapping with the SFT offset and
1287 size. This is only true if we are accessing through a pointer
1288 to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
1289 be accessing through a pointer to some substruct of the
1290 structure, and if we try to prune there, we will have the wrong
1291 offset, and get the wrong answer.
1292 i.e., we can't prune without more work if we have something like
1294 struct gcc_target
1296 struct asm_out
1298 const char *byte_op;
1299 struct asm_int_op
1301 const char *hi;
1302 } aligned_op;
1303 } asm_out;
1304 } targetm;
1306 foo = &targetm.asm_out.aligned_op;
1307 return foo->hi;
1309 SFT.1, which represents hi, will have SFT_OFFSET=32 because in
1310 terms of SFT_PARENT_VAR, that is where it is.
1311 However, the access through the foo pointer will be at offset 0. */
1312 if (size != -1
1313 && TREE_CODE (alias) == STRUCT_FIELD_TAG
1314 && base
1315 && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
1316 && !overlap_subvar (offset, size, alias, NULL))
1318 #ifdef ACCESS_DEBUGGING
1319 fprintf (stderr, "Access to ");
1320 print_generic_expr (stderr, ref, 0);
1321 fprintf (stderr, " may not touch ");
1322 print_generic_expr (stderr, alias, 0);
1323 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1324 #endif
1325 return false;
1328 /* With strict aliasing, it is impossible for a component access
1329 through a pointer to touch a random variable, unless that
1330 variable *is* a structure or a pointer.
1332 That is, given p->c, and some random global variable b,
1333 there is no legal way that p->c could be an access to b.
1335 Without strict aliasing on, we consider it legal to do something
1336 like:
1338 struct foos { int l; };
1339 int foo;
1340 static struct foos *getfoo(void);
1341 int main (void)
1343 struct foos *f = getfoo();
1344 f->l = 1;
1345 foo = 2;
1346 if (f->l == 1)
1347 abort();
1348 exit(0);
1350 static struct foos *getfoo(void)
1351 { return (struct foos *)&foo; }
1353 (taken from 20000623-1.c)
1355 The docs also say/imply that access through union pointers
1356 is legal (but *not* if you take the address of the union member,
1357 i.e. the inverse), such that you can do
1359 typedef union {
1360 int d;
1361 } U;
1363 int rv;
1364 void breakme()
1366 U *rv0;
1367 U *pretmp = (U*)&rv;
1368 rv0 = pretmp;
1369 rv0->d = 42;
1371 To implement this, we just punt on accesses through union
1372 pointers entirely.
1374 else if (ref
1375 && flag_strict_aliasing
1376 && TREE_CODE (ref) != INDIRECT_REF
1377 && !MTAG_P (alias)
1378 && (TREE_CODE (base) != INDIRECT_REF
1379 || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
1380 && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
1381 && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
1382 && !var_ann (alias)->is_heapvar
1383 /* When the struct has may_alias attached to it, its alias set is 0,
1384 so we must not prune the access here. */
1385 && get_alias_set (base))
1387 #ifdef ACCESS_DEBUGGING
1388 fprintf (stderr, "Access to ");
1389 print_generic_expr (stderr, ref, 0);
1390 fprintf (stderr, " may not touch ");
1391 print_generic_expr (stderr, alias, 0);
1392 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1393 #endif
1394 return false;
1397 /* If the offset of the access is greater than the size of one of
1398 the possible aliases, it can't be touching that alias, because it
1399 would be past the end of the structure. */
1400 else if (ref
1401 && flag_strict_aliasing
1402 && TREE_CODE (ref) != INDIRECT_REF
1403 && !MTAG_P (alias)
1404 && !POINTER_TYPE_P (TREE_TYPE (alias))
1405 && offsetgtz
1406 && DECL_SIZE (alias)
1407 && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
1408 && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
1410 #ifdef ACCESS_DEBUGGING
1411 fprintf (stderr, "Access to ");
1412 print_generic_expr (stderr, ref, 0);
1413 fprintf (stderr, " may not touch ");
1414 print_generic_expr (stderr, alias, 0);
1415 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1416 #endif
1417 return false;
1420 return true;
1424 /* Add VAR to the virtual operands array. FLAGS is as in
1425 get_expr_operands. FULL_REF is a tree that contains the entire
1426 pointer dereference expression, if available, or NULL otherwise.
1427 OFFSET and SIZE come from the memory access expression that
1428 generated this virtual operand. FOR_CLOBBER is true if this is
1429 adding a virtual operand for a call clobber. */
1431 static void
1432 add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
1433 tree full_ref, HOST_WIDE_INT offset,
1434 HOST_WIDE_INT size, bool for_clobber)
1436 VEC(tree,gc) *aliases;
1437 tree sym;
1438 var_ann_t v_ann;
1440 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1441 v_ann = var_ann (sym);
1443 /* Mark the statement as having memory operands. */
1444 s_ann->references_memory = true;
1446 /* Mark statements with volatile operands. Optimizers should back
1447 off from statements having volatile operands. */
1448 if (TREE_THIS_VOLATILE (sym) && s_ann)
1449 s_ann->has_volatile_ops = true;
1451 /* If the variable cannot be modified and this is a VDEF, change
1452 it into a VUSE. This happens when read-only variables are marked
1453 call-clobbered and/or aliased to writable variables. We only
1454 allow this change for non-specific (implicit) stores.
1456 Note that if this is a specific store, i.e. associated with a
1457 GIMPLE_MODIFY_STMT, then we can't suppress the VDEF, lest we run
1458 into validation problems.
1460 This can happen when programs cast away const, leaving us with a
1461 store to read-only memory. If the statement is actually executed
1462 at runtime, then the program is ill formed. If the statement is
1463 not executed then all is well. At the very least, we cannot ICE. */
1464 if ((flags & opf_implicit) && unmodifiable_var_p (var))
1465 flags &= ~opf_def;
1467 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1468 virtual operands, unless the caller has specifically requested
1469 not to add virtual operands (used when adding operands inside an
1470 ADDR_EXPR expression). */
1471 if (flags & opf_no_vops)
1472 return;
1474 aliases = v_ann->may_aliases;
1475 if (aliases == NULL)
1477 /* The variable is not aliased or it is an alias tag. */
1478 if (flags & opf_def)
1479 append_vdef (var);
1480 else
1481 append_vuse (var);
1483 else
1485 unsigned i;
1486 tree al;
1488 /* The variable is aliased. Add its aliases to the virtual
1489 operands. */
1490 gcc_assert (VEC_length (tree, aliases) != 0);
1492 if (flags & opf_def)
1494 bool none_added = true;
1496 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1498 if (!access_can_touch_variable (full_ref, al, offset, size))
1499 continue;
1501 none_added = false;
1502 append_vdef (al);
1505 /* If the variable is also an alias tag, add a virtual
1506 operand for it, otherwise we will miss representing
1507 references to the members of the variable's alias set.
1508 This fixes the bug in gcc.c-torture/execute/20020503-1.c.
1510 It is also necessary to add bare defs on clobbers for
1511 SMT's, so that bare SMT uses caused by pruning all the
1512 aliases will link up properly with calls. In order to
1513 keep the number of these bare defs we add down to the
1514 minimum necessary, we keep track of which SMT's were used
1515 alone in statement vdefs or VUSEs. */
1516 if (v_ann->is_aliased
1517 || none_added
1518 || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
1519 && for_clobber))
1521 append_vdef (var);
1524 else
1526 bool none_added = true;
1527 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1529 if (!access_can_touch_variable (full_ref, al, offset, size))
1530 continue;
1531 none_added = false;
1532 append_vuse (al);
1535 /* Similarly, append a virtual use for VAR itself, when
1536 it is an alias tag. */
1537 if (v_ann->is_aliased || none_added)
1538 append_vuse (var);
1544 /* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
1545 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1546 the statement's real operands, otherwise it is added to virtual
1547 operands. */
1549 static void
1550 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1552 tree var, sym;
1553 var_ann_t v_ann;
1555 gcc_assert (SSA_VAR_P (*var_p) && s_ann);
1557 var = *var_p;
1558 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1559 v_ann = var_ann (sym);
1561 /* Mark statements with volatile operands. */
1562 if (TREE_THIS_VOLATILE (sym))
1563 s_ann->has_volatile_ops = true;
1565 if (is_gimple_reg (sym))
1567 /* The variable is a GIMPLE register. Add it to real operands. */
1568 if (flags & opf_def)
1569 append_def (var_p);
1570 else
1571 append_use (var_p);
1573 else
1574 add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
1578 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1579 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
1581 STMT is the statement being processed, EXPR is the INDIRECT_REF
1582 that got us here.
1584 FLAGS is as in get_expr_operands.
1586 FULL_REF contains the full pointer dereference expression, if we
1587 have it, or NULL otherwise.
1589 OFFSET and SIZE are the location of the access inside the
1590 dereferenced pointer, if known.
1592 RECURSE_ON_BASE should be set to true if we want to continue
1593 calling get_expr_operands on the base pointer, and false if
1594 something else will do it for us. */
1596 static void
1597 get_indirect_ref_operands (tree stmt, tree expr, int flags,
1598 tree full_ref,
1599 HOST_WIDE_INT offset, HOST_WIDE_INT size,
1600 bool recurse_on_base)
1602 tree *pptr = &TREE_OPERAND (expr, 0);
1603 tree ptr = *pptr;
1604 stmt_ann_t s_ann = stmt_ann (stmt);
1606 s_ann->references_memory = true;
1608 if (SSA_VAR_P (ptr))
1610 struct ptr_info_def *pi = NULL;
1612 /* If PTR has flow-sensitive points-to information, use it. */
1613 if (TREE_CODE (ptr) == SSA_NAME
1614 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1615 && pi->name_mem_tag)
1617 /* PTR has its own memory tag. Use it. */
1618 add_virtual_operand (pi->name_mem_tag, s_ann, flags,
1619 full_ref, offset, size, false);
1621 else
1623 /* If PTR is not an SSA_NAME or it doesn't have a name
1624 tag, use its symbol memory tag. */
1625 var_ann_t v_ann;
1627 /* If we are emitting debugging dumps, display a warning if
1628 PTR is an SSA_NAME with no flow-sensitive alias
1629 information. That means that we may need to compute
1630 aliasing again. */
1631 if (dump_file
1632 && TREE_CODE (ptr) == SSA_NAME
1633 && pi == NULL)
1635 fprintf (dump_file,
1636 "NOTE: no flow-sensitive alias info for ");
1637 print_generic_expr (dump_file, ptr, dump_flags);
1638 fprintf (dump_file, " in ");
1639 print_generic_stmt (dump_file, stmt, dump_flags);
1642 if (TREE_CODE (ptr) == SSA_NAME)
1643 ptr = SSA_NAME_VAR (ptr);
1644 v_ann = var_ann (ptr);
1646 if (v_ann->symbol_mem_tag)
1647 add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
1648 full_ref, offset, size, false);
1651 else if (TREE_CODE (ptr) == INTEGER_CST)
1653 /* If a constant is used as a pointer, we can't generate a real
1654 operand for it but we mark the statement volatile to prevent
1655 optimizations from messing things up. */
1656 if (s_ann)
1657 s_ann->has_volatile_ops = true;
1658 return;
1660 else
1662 /* Ok, this isn't even is_gimple_min_invariant. Something's broken. */
1663 gcc_unreachable ();
1666 /* If requested, add a USE operand for the base pointer. */
1667 if (recurse_on_base)
1668 get_expr_operands (stmt, pptr, opf_use);
1672 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1674 static void
1675 get_tmr_operands (tree stmt, tree expr, int flags)
1677 tree tag, ref;
1678 HOST_WIDE_INT offset, size, maxsize;
1679 subvar_t svars, sv;
1680 stmt_ann_t s_ann = stmt_ann (stmt);
1682 /* This statement references memory. */
1683 s_ann->references_memory = 1;
1685 /* First record the real operands. */
1686 get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
1687 get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);
1689 if (TMR_SYMBOL (expr))
1690 add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken);
1692 tag = TMR_TAG (expr);
1693 if (!tag)
1695 /* Something weird, so ensure that we will be careful. */
1696 s_ann->has_volatile_ops = true;
1697 return;
1700 if (DECL_P (tag))
1702 get_expr_operands (stmt, &tag, flags);
1703 return;
1706 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1707 gcc_assert (ref != NULL_TREE);
1708 svars = get_subvars_for_var (ref);
1709 for (sv = svars; sv; sv = sv->next)
1711 bool exact;
1713 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1714 add_stmt_operand (&sv->var, s_ann, flags);
1719 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1720 clobbered variables in the function. */
1722 static void
1723 add_call_clobber_ops (tree stmt, tree callee)
1725 unsigned u;
1726 bitmap_iterator bi;
1727 stmt_ann_t s_ann = stmt_ann (stmt);
1728 bitmap not_read_b, not_written_b;
1730 /* Functions that are not const, pure, or noreturn may clobber
1731 call-clobbered variables. */
1732 if (s_ann)
1733 s_ann->makes_clobbering_call = true;
1735 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1736 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1737 if (gimple_global_var (cfun))
1739 tree var = gimple_global_var (cfun);
1740 add_stmt_operand (&var, s_ann, opf_def);
1741 return;
1744 /* Get info for local and module level statics. There is a bit
1745 set for each static if the call being processed does not read
1746 or write that variable. */
1747 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1748 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1750 /* Add a VDEF operand for every call clobbered variable. */
1751 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
1753 tree var = referenced_var_lookup (u);
1754 unsigned int escape_mask = var_ann (var)->escape_mask;
1755 tree real_var = var;
1756 bool not_read;
1757 bool not_written;
1759 /* Not read and not written are computed on regular vars, not
1760 subvars, so look at the parent var if this is an SFT. */
1761 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1762 real_var = SFT_PARENT_VAR (var);
1764 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1765 DECL_UID (real_var)) : false;
1766 not_written = not_written_b ? bitmap_bit_p (not_written_b,
1767 DECL_UID (real_var)) : false;
1768 gcc_assert (!unmodifiable_var_p (var));
1770 clobber_stats.clobbered_vars++;
1772 /* See if this variable is really clobbered by this function. */
1774 /* Trivial case: Things escaping only to pure/const are not
1775 clobbered by non-pure-const, and only read by pure/const. */
1776 if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
1778 tree call = get_call_expr_in (stmt);
1779 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1781 add_stmt_operand (&var, s_ann, opf_use);
1782 clobber_stats.unescapable_clobbers_avoided++;
1783 continue;
1785 else
1787 clobber_stats.unescapable_clobbers_avoided++;
1788 continue;
1792 if (not_written)
1794 clobber_stats.static_write_clobbers_avoided++;
1795 if (!not_read)
1796 add_stmt_operand (&var, s_ann, opf_use);
1797 else
1798 clobber_stats.static_read_clobbers_avoided++;
1800 else
1801 add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
1806 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1807 function. */
1809 static void
1810 add_call_read_ops (tree stmt, tree callee)
1812 unsigned u;
1813 bitmap_iterator bi;
1814 stmt_ann_t s_ann = stmt_ann (stmt);
1815 bitmap not_read_b;
1817 /* If the function is not pure, it may reference memory. Add
1818 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1819 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1820 if (gimple_global_var (cfun))
1822 tree var = gimple_global_var (cfun);
1823 add_stmt_operand (&var, s_ann, opf_use);
1824 return;
1827 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1829 /* Add a VUSE for each call-clobbered variable. */
1830 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
1832 tree var = referenced_var (u);
1833 tree real_var = var;
1834 bool not_read;
1836 clobber_stats.readonly_clobbers++;
1838 /* Not read and not written are computed on regular vars, not
1839 subvars, so look at the parent var if this is an SFT. */
1841 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1842 real_var = SFT_PARENT_VAR (var);
1844 not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
1845 : false;
1847 if (not_read)
1849 clobber_stats.static_readonly_clobbers_avoided++;
1850 continue;
1853 add_stmt_operand (&var, s_ann, opf_use | opf_implicit);
1858 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1860 static void
1861 get_call_expr_operands (tree stmt, tree expr)
1863 tree op;
1864 int call_flags = call_expr_flags (expr);
1865 stmt_ann_t ann = stmt_ann (stmt);
1867 ann->references_memory = true;
1869 /* If aliases have been computed already, add VDEF or VUSE
1870 operands for all the symbols that have been found to be
1871 call-clobbered. */
1872 if (gimple_aliases_computed_p (cfun)
1873 && !(call_flags & ECF_NOVOPS))
1875 /* A 'pure' or a 'const' function never call-clobbers anything.
1876 A 'noreturn' function might, but since we don't return anyway
1877 there is no point in recording that. */
1878 if (TREE_SIDE_EFFECTS (expr)
1879 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1880 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1881 else if (!(call_flags & ECF_CONST))
1882 add_call_read_ops (stmt, get_callee_fndecl (expr));
1885 /* Find uses in the called function. */
1886 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
1888 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1889 get_expr_operands (stmt, &TREE_VALUE (op), opf_use);
1891 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
1895 /* Scan operands in the ASM_EXPR statement STMT. */
1897 static void
1898 get_asm_expr_operands (tree stmt)
1900 stmt_ann_t s_ann;
1901 int i, noutputs;
1902 const char **oconstraints;
1903 const char *constraint;
1904 bool allows_mem, allows_reg, is_inout;
1905 tree link;
1907 s_ann = stmt_ann (stmt);
1908 noutputs = list_length (ASM_OUTPUTS (stmt));
1909 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
1911 /* Gather all output operands. */
1912 for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link))
1914 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1915 oconstraints[i] = constraint;
1916 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
1917 &allows_reg, &is_inout);
1919 /* This should have been split in gimplify_asm_expr. */
1920 gcc_assert (!allows_reg || !is_inout);
1922 /* Memory operands are addressable. Note that STMT needs the
1923 address of this operand. */
1924 if (!allows_reg && allows_mem)
1926 tree t = get_base_address (TREE_VALUE (link));
1927 if (t && DECL_P (t) && s_ann)
1928 add_to_addressable_set (t, &s_ann->addresses_taken);
1931 get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
1934 /* Gather all input operands. */
1935 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1937 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1938 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
1939 &allows_mem, &allows_reg);
1941 /* Memory operands are addressable. Note that STMT needs the
1942 address of this operand. */
1943 if (!allows_reg && allows_mem)
1945 tree t = get_base_address (TREE_VALUE (link));
1946 if (t && DECL_P (t) && s_ann)
1947 add_to_addressable_set (t, &s_ann->addresses_taken);
1950 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1953 /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
1954 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1955 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1957 unsigned i;
1958 bitmap_iterator bi;
1960 s_ann->references_memory = true;
1962 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
1964 tree var = referenced_var (i);
1965 add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
1968 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
1970 tree var = referenced_var (i);
1972 /* Subvars are explicitly represented in this list, so we
1973 don't need the original to be added to the clobber ops,
1974 but the original *will* be in this list because we keep
1975 the addressability of the original variable up-to-date
1976 to avoid confusing the back-end. */
1977 if (var_can_have_subvars (var)
1978 && get_subvars_for_var (var) != NULL)
1979 continue;
1981 add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
1983 break;
1988 /* Scan operands for the assignment expression EXPR in statement STMT. */
1990 static void
1991 get_modify_stmt_operands (tree stmt, tree expr)
1993 /* First get operands from the RHS. */
1994 get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_use);
1996 /* For the LHS, use a regular definition (opf_def) for GIMPLE
1997 registers. If the LHS is a store to memory, we will need
1998 a preserving definition (VDEF).
2000 Preserving definitions are those that modify a part of an
2001 aggregate object for which no subvars have been computed (or the
2002 reference does not correspond exactly to one of them). Stores
2003 through a pointer are also represented with VDEF operators.
2005 We used to distinguish between preserving and killing definitions.
2006 We always emit preserving definitions now. */
2007 get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0), opf_def);
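/* For illustration: given a partial store such as

       a.x = t_5;

   where 'a' has no usable subvars, the opf_def scan of the LHS yields a
   preserving definition

       # a_7 = VDEF <a_6>
       a.x = t_5;

   i.e. the new name a_7 still depends on a_6 because only part of 'a'
   was modified.  */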
2011 /* Recursively scan the expression pointed to by EXPR_P in statement
2012 STMT. FLAGS is a combination of the OPF_* constants modifying how
2013 to interpret the operands found. */
2015 static void
2016 get_expr_operands (tree stmt, tree *expr_p, int flags)
2018 enum tree_code code;
2019 enum tree_code_class class;
2020 tree expr = *expr_p;
2021 stmt_ann_t s_ann = stmt_ann (stmt);
2023 if (expr == NULL)
2024 return;
2026 code = TREE_CODE (expr);
2027 class = TREE_CODE_CLASS (code);
2029 switch (code)
2031 case ADDR_EXPR:
2032 /* Taking the address of a variable does not represent a
2033 reference to it, but the fact that the statement takes its
2034 address will be of interest to some passes (e.g. alias
2035 resolution). */
2036 add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
2038 /* If the address is invariant, there may be no interesting
2039 variable references inside. */
2040 if (is_gimple_min_invariant (expr))
2041 return;
2043 /* Otherwise, there may be variables referenced inside but there
2044 should be no VUSEs created, since the referenced objects are
2045 not really accessed. The only operands that we should find
2046 here are ARRAY_REF indices which will always be real operands
2047 (GIMPLE does not allow non-registers as array indices). */
2048 flags |= opf_no_vops;
2049 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2050 return;
2052 case SSA_NAME:
2053 case STRUCT_FIELD_TAG:
2054 case SYMBOL_MEMORY_TAG:
2055 case NAME_MEMORY_TAG:
2056 add_stmt_operand (expr_p, s_ann, flags);
2057 return;
2059 case VAR_DECL:
2060 case PARM_DECL:
2061 case RESULT_DECL:
2063 subvar_t svars;
2065 /* Add the subvars for a variable, if it has subvars, to DEFS
2066 or USES. Otherwise, add the variable itself. Whether it
2067 goes to USES or DEFS depends on the operand flags. */
2068 if (var_can_have_subvars (expr)
2069 && (svars = get_subvars_for_var (expr)))
2071 subvar_t sv;
2072 for (sv = svars; sv; sv = sv->next)
2073 add_stmt_operand (&sv->var, s_ann, flags);
2075 else
2076 add_stmt_operand (expr_p, s_ann, flags);
2078 return;
2081 case MISALIGNED_INDIRECT_REF:
2082 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2083 /* fall through */
2085 case ALIGN_INDIRECT_REF:
2086 case INDIRECT_REF:
2087 get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
2088 return;
2090 case TARGET_MEM_REF:
2091 get_tmr_operands (stmt, expr, flags);
2092 return;
2094 case ARRAY_REF:
2095 case ARRAY_RANGE_REF:
2096 case COMPONENT_REF:
2097 case REALPART_EXPR:
2098 case IMAGPART_EXPR:
2100 tree ref;
2101 HOST_WIDE_INT offset, size, maxsize;
2102 bool none = true;
2104 /* This component reference becomes an access to all of the
2105 subvariables it can touch, if we can determine that, but
2106 *NOT* the real variable itself. If we can't determine which
2107 fields we could touch, the recursion will eventually get to a
2108 variable and add *all* of its subvars, or whatever is the
2109 minimum correct subset. */
2110 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
2111 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
2113 subvar_t sv;
2114 subvar_t svars = get_subvars_for_var (ref);
2116 for (sv = svars; sv; sv = sv->next)
2118 bool exact;
2120 if (overlap_subvar (offset, maxsize, sv->var, &exact))
2122 int subvar_flags = flags;
2123 none = false;
2124 add_stmt_operand (&sv->var, s_ann, subvar_flags);
2128 if (!none)
2129 flags |= opf_no_vops;
2131 else if (TREE_CODE (ref) == INDIRECT_REF)
2133 get_indirect_ref_operands (stmt, ref, flags, expr, offset,
2134 maxsize, false);
2135 flags |= opf_no_vops;
2138 /* Even if we found subvars above, we still need to make sure we
2139 see the immediate uses of D in S.A[D] in case S.A has subvars;
2140 otherwise we would miss them. */
2141 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2143 if (code == COMPONENT_REF)
2145 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
2146 s_ann->has_volatile_ops = true;
2147 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2149 else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
2151 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2152 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2153 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use);
2156 return;
2159 case WITH_SIZE_EXPR:
2160 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
2161 and an rvalue reference to its second argument. */
2162 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2163 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2164 return;
2166 case CALL_EXPR:
2167 get_call_expr_operands (stmt, expr);
2168 return;
2170 case COND_EXPR:
2171 case VEC_COND_EXPR:
2172 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
2173 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2174 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2175 return;
2177 case GIMPLE_MODIFY_STMT:
2178 get_modify_stmt_operands (stmt, expr);
2179 return;
2181 case CONSTRUCTOR:
2183 /* General aggregate CONSTRUCTORs have been decomposed, but they
2184 are still in use as the COMPLEX_EXPR equivalent for vectors. */
2185 constructor_elt *ce;
2186 unsigned HOST_WIDE_INT idx;
2188 for (idx = 0;
2189 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
2190 idx++)
2191 get_expr_operands (stmt, &ce->value, opf_use);
2193 return;
2196 case BIT_FIELD_REF:
2197 case TRUTH_NOT_EXPR:
2198 case VIEW_CONVERT_EXPR:
2199 do_unary:
2200 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2201 return;
2203 case TRUTH_AND_EXPR:
2204 case TRUTH_OR_EXPR:
2205 case TRUTH_XOR_EXPR:
2206 case COMPOUND_EXPR:
2207 case OBJ_TYPE_REF:
2208 case ASSERT_EXPR:
2209 do_binary:
2211 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2212 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2213 return;
2216 case DOT_PROD_EXPR:
2217 case REALIGN_LOAD_EXPR:
2219 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2220 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2221 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
2222 return;
2225 case BLOCK:
2226 case FUNCTION_DECL:
2227 case EXC_PTR_EXPR:
2228 case FILTER_EXPR:
2229 case LABEL_DECL:
2230 case CONST_DECL:
2231 case OMP_PARALLEL:
2232 case OMP_SECTIONS:
2233 case OMP_FOR:
2234 case OMP_SINGLE:
2235 case OMP_MASTER:
2236 case OMP_ORDERED:
2237 case OMP_CRITICAL:
2238 case OMP_RETURN:
2239 case OMP_CONTINUE:
2240 /* Expressions that make no memory references. */
2241 return;
2243 default:
2244 if (class == tcc_unary)
2245 goto do_unary;
2246 if (class == tcc_binary || class == tcc_comparison)
2247 goto do_binary;
2248 if (class == tcc_constant || class == tcc_type)
2249 return;
2252 /* If we get here, something has gone wrong. */
2253 #ifdef ENABLE_CHECKING
2254 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
2255 debug_tree (expr);
2256 fputs ("\n", stderr);
2257 #endif
2258 gcc_unreachable ();
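/* For illustration: given the gimplified statements

       t_6 = *p_3;
       a.x = t_6;

   the scan above finds, for the first statement, a real USE of p_3, a
   VUSE for the memory reachable through p_3 (via
   get_indirect_ref_operands) and a real DEF of t_6; for the second, a
   real USE of t_6 and a VDEF of 'a' (or of the subvariable covering
   field 'x', if subvars were computed).  */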
2262 /* Parse STMT looking for operands. When finished, the various
2263 build_* operand vectors will have potential operands in them. */
2265 static void
2266 parse_ssa_operands (tree stmt)
2268 enum tree_code code;
2270 code = TREE_CODE (stmt);
2271 switch (code)
2273 case GIMPLE_MODIFY_STMT:
2274 get_modify_stmt_operands (stmt, stmt);
2275 break;
2277 case COND_EXPR:
2278 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_use);
2279 break;
2281 case SWITCH_EXPR:
2282 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_use);
2283 break;
2285 case ASM_EXPR:
2286 get_asm_expr_operands (stmt);
2287 break;
2289 case RETURN_EXPR:
2290 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_use);
2291 break;
2293 case GOTO_EXPR:
2294 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_use);
2295 break;
2297 case LABEL_EXPR:
2298 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_use);
2299 break;
2301 case BIND_EXPR:
2302 case CASE_LABEL_EXPR:
2303 case TRY_CATCH_EXPR:
2304 case TRY_FINALLY_EXPR:
2305 case EH_FILTER_EXPR:
2306 case CATCH_EXPR:
2307 case RESX_EXPR:
2308 /* These nodes contain no variable references. */
2309 break;
2311 default:
2312 /* Notice that if get_expr_operands tries to use &STMT as the
2313 operand pointer (which may only happen for USE operands), we
2314 will fail in add_stmt_operand. This default case handles
2315 empty statements as well as CALL_EXPRs that may appear on the
2316 RHS of a statement or as statements themselves. */
2317 get_expr_operands (stmt, &stmt, opf_use);
2318 break;
2323 /* Create an operands cache for STMT. */
2325 static void
2326 build_ssa_operands (tree stmt)
2328 stmt_ann_t ann = get_stmt_ann (stmt);
2330 /* Initially assume that the statement has no volatile operands and
2331 makes no memory references. */
2332 ann->has_volatile_ops = false;
2333 ann->references_memory = false;
2335 start_ssa_stmt_operands ();
2336 parse_ssa_operands (stmt);
2337 operand_build_sort_virtual (build_vuses);
2338 operand_build_sort_virtual (build_vdefs);
2339 finalize_ssa_stmt_operands (stmt);
2341 /* For added safety, assume that statements with volatile operands
2342 also reference memory. */
2343 if (ann->has_volatile_ops)
2344 ann->references_memory = true;
2348 /* Free any operand vectors in OPS. */
2350 void
2351 free_ssa_operands (stmt_operands_p ops)
2353 ops->def_ops = NULL;
2354 ops->use_ops = NULL;
2355 ops->vdef_ops = NULL;
2356 ops->vuse_ops = NULL;
2357 BITMAP_FREE (ops->loads);
2358 BITMAP_FREE (ops->stores);
2362 /* Recompute the operand cache for statement STMT. */
2364 void
2365 update_stmt_operands (tree stmt)
2367 stmt_ann_t ann = get_stmt_ann (stmt);
2369 /* If update_stmt_operands is called before SSA is initialized, do
2370 nothing. */
2371 if (!ssa_operands_active ())
2372 return;
2374 /* The optimizers cannot handle statements that are nothing but a
2375 _DECL. This indicates a bug in the gimplifier. */
2376 gcc_assert (!SSA_VAR_P (stmt));
2378 timevar_push (TV_TREE_OPS);
2380 gcc_assert (ann->modified);
2381 build_ssa_operands (stmt);
2382 ann->modified = 0;
2384 timevar_pop (TV_TREE_OPS);
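/* Usage sketch: passes normally do not call update_stmt_operands
   directly; they modify a statement and then call update_stmt, which
   marks the statement modified and triggers the operand re-scan above:

       GIMPLE_STMT_OPERAND (stmt, 1) = new_rhs;
       update_stmt (stmt);

   NEW_RHS stands for whatever replacement expression the caller built.  */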
2388 /* Copy the virtual operands from SRC to DEST. */
2390 void
2391 copy_virtual_operands (tree dest, tree src)
2393 int i, n;
2394 vuse_optype_p src_vuses, dest_vuses;
2395 vdef_optype_p src_vdefs, dest_vdefs;
2396 struct vuse_optype_d vuse;
2397 struct vdef_optype_d vdef;
2398 stmt_ann_t dest_ann;
2400 VDEF_OPS (dest) = NULL;
2401 VUSE_OPS (dest) = NULL;
2403 dest_ann = get_stmt_ann (dest);
2404 BITMAP_FREE (dest_ann->operands.loads);
2405 BITMAP_FREE (dest_ann->operands.stores);
2407 if (LOADED_SYMS (src))
2409 dest_ann->operands.loads = BITMAP_ALLOC (NULL);
2410 bitmap_copy (dest_ann->operands.loads, LOADED_SYMS (src));
2413 if (STORED_SYMS (src))
2415 dest_ann->operands.stores = BITMAP_ALLOC (NULL);
2416 bitmap_copy (dest_ann->operands.stores, STORED_SYMS (src));
2419 /* Copy all the VUSE operators and corresponding operands. */
2420 dest_vuses = &vuse;
2421 for (src_vuses = VUSE_OPS (src); src_vuses; src_vuses = src_vuses->next)
2423 n = VUSE_NUM (src_vuses);
2424 dest_vuses = add_vuse_op (dest, NULL_TREE, n, &dest_vuses);
2425 dest_vuses->next = NULL;
2426 for (i = 0; i < n; i++)
2427 SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));
2429 if (VUSE_OPS (dest) == NULL)
2430 VUSE_OPS (dest) = vuse.next;
2433 /* Copy all the VDEF operators and corresponding operands. */
2434 dest_vdefs = &vdef;
2435 for (src_vdefs = VDEF_OPS (src); src_vdefs; src_vdefs = src_vdefs->next)
2437 n = VUSE_NUM (src_vdefs);
2438 dest_vdefs = add_vdef_op (dest, NULL_TREE, n, &dest_vdefs);
2439 dest_vdefs->next = NULL;
2440 VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs);
2441 for (i = 0; i < n; i++)
2442 SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));
2444 if (VDEF_OPS (dest) == NULL)
2445 VDEF_OPS (dest) = vdef.next;
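/* Usage sketch: a pass that makes a copy of a statement without changing
   its memory behavior can reuse the virtual operands of the original
   instead of forcing a full operand re-scan on the copy:

       copy = unshare_expr (stmt);
       ... insert COPY into the instruction stream ...
       copy_virtual_operands (copy, stmt);

   (unshare_expr and the insertion step are just placeholders here.)  */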
2450 /* Specifically for use in DOM's expression analysis. Given a store, we
2451 create an artificial stmt which looks like a load from the store; this
2452 can be used to eliminate redundant loads. OLD_STMT is the store
2453 statement, and NEW_STMT is the new load, which represents a load of the
2454 values stored. */
2456 void
2457 create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt)
2459 tree op;
2460 ssa_op_iter iter;
2461 use_operand_p use_p;
2462 unsigned i;
2464 get_stmt_ann (new_stmt);
2466 /* Process NEW_STMT looking for operands. */
2467 start_ssa_stmt_operands ();
2468 parse_ssa_operands (new_stmt);
2470 for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
2471 if (TREE_CODE (op) != SSA_NAME)
2472 var_ann (op)->in_vuse_list = false;
2474 for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
2475 if (TREE_CODE (op) != SSA_NAME)
2476 var_ann (op)->in_vdef_list = false;
2478 /* Remove any virtual operands that were found. */
2479 VEC_truncate (tree, build_vdefs, 0);
2480 VEC_truncate (tree, build_vuses, 0);
2482 /* For each VDEF on the original statement, we want to create a
2483 VUSE of the VDEF result operand on the new statement. */
2484 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
2485 append_vuse (op);
2487 finalize_ssa_stmt_operands (new_stmt);
2489 /* All uses in this fake stmt must not be in the immediate use lists. */
2490 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
2491 delink_imm_use (use_p);
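/* Usage sketch: after recording a store such as "*p_1 = x_2", DOM builds
   a statement with the LHS and RHS exchanged ("x_2 = *p_1") and turns it
   into an artificial load, so that a later redundant load through p_1
   hashes equal to it:

       new_stmt = build_gimple_modify_stmt (rhs, lhs);
       create_ssa_artificial_load_stmt (new_stmt, store_stmt);

   (build_gimple_modify_stmt is assumed here; the actual call site lives
   in tree-ssa-dom.c.)  */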
2495 /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is made
2496 to check the validity of the swap operation. */
2498 void
2499 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
2501 tree op0, op1;
2502 op0 = *exp0;
2503 op1 = *exp1;
2505 /* If the operand cache is active, attempt to preserve the relative
2506 positions of these two operands in their respective immediate use
2507 lists. */
2508 if (ssa_operands_active () && op0 != op1)
2510 use_optype_p use0, use1, ptr;
2511 use0 = use1 = NULL;
2513 /* Find the 2 operands in the cache, if they are there. */
2514 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2515 if (USE_OP_PTR (ptr)->use == exp0)
2517 use0 = ptr;
2518 break;
2521 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2522 if (USE_OP_PTR (ptr)->use == exp1)
2524 use1 = ptr;
2525 break;
2528 /* Unless both uses were found in the operand cache, there isn't much
2529 we can do at this point. Presumably we don't need to worry about it. */
2530 if (use0 && use1)
2532 tree *tmp = USE_OP_PTR (use1)->use;
2533 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
2534 USE_OP_PTR (use0)->use = tmp;
2538 /* Now swap the data. */
2539 *exp0 = op1;
2540 *exp1 = op0;
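/* Usage sketch: a typical caller swaps the two operands of a commutative
   expression on the RHS of a GIMPLE_MODIFY_STMT; when the operand cache
   is active, the relative positions in the immediate-use lists are
   preserved:

       tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
       swap_tree_operands (stmt,
                           &TREE_OPERAND (rhs, 0),
                           &TREE_OPERAND (rhs, 1));
*/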
2544 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
2545 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
2546 a single variable whose address has been taken or any other valid
2547 GIMPLE memory reference (structure reference, array, etc). If the
2548 base address of REF is a decl that has sub-variables, also add all
2549 of its sub-variables. */
2551 void
2552 add_to_addressable_set (tree ref, bitmap *addresses_taken)
2554 tree var;
2555 subvar_t svars;
2557 gcc_assert (addresses_taken);
2559 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
2560 as the only thing we take the address of. If VAR is a structure,
2561 taking the address of a field means that the whole structure may
2562 be referenced using pointer arithmetic. See PR 21407 and the
2563 ensuing mailing list discussion. */
2564 var = get_base_address (ref);
2565 if (var && SSA_VAR_P (var))
2567 if (*addresses_taken == NULL)
2568 *addresses_taken = BITMAP_GGC_ALLOC ();
2570 if (var_can_have_subvars (var)
2571 && (svars = get_subvars_for_var (var)))
2573 subvar_t sv;
2574 for (sv = svars; sv; sv = sv->next)
2576 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
2577 TREE_ADDRESSABLE (sv->var) = 1;
2580 else
2582 bitmap_set_bit (*addresses_taken, DECL_UID (var));
2583 TREE_ADDRESSABLE (var) = 1;
2589 /* Scan the immediate_use list for VAR, making sure it is linked properly.
2590 Return TRUE if there is a problem and emit an error message to F. */
2592 bool
2593 verify_imm_links (FILE *f, tree var)
2595 use_operand_p ptr, prev, list;
2596 int count;
2598 gcc_assert (TREE_CODE (var) == SSA_NAME);
2600 list = &(SSA_NAME_IMM_USE_NODE (var));
2601 gcc_assert (list->use == NULL);
2603 if (list->prev == NULL)
2605 gcc_assert (list->next == NULL);
2606 return false;
2609 prev = list;
2610 count = 0;
2611 for (ptr = list->next; ptr != list; )
2613 if (prev != ptr->prev)
2614 goto error;
2616 if (ptr->use == NULL)
2617 goto error; /* A second list root, or the safe guard node. */
2618 else if (*(ptr->use) != var)
2619 goto error;
2621 prev = ptr;
2622 ptr = ptr->next;
2624 /* Avoid infinite loops. 50,000,000 uses probably indicates a
2625 problem. */
2626 if (count++ > 50000000)
2627 goto error;
2630 /* Verify list in the other direction. */
2631 prev = list;
2632 for (ptr = list->prev; ptr != list; )
2634 if (prev != ptr->next)
2635 goto error;
2636 prev = ptr;
2637 ptr = ptr->prev;
2638 if (count-- < 0)
2639 goto error;
2642 if (count != 0)
2643 goto error;
2645 return false;
2647 error:
2648 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2650 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2651 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2653 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2654 (void *)ptr->use);
2655 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2656 fprintf(f, "\n");
2657 return true;
2661 /* Dump all the immediate uses of VAR to FILE. */
2663 void
2664 dump_immediate_uses_for (FILE *file, tree var)
2666 imm_use_iterator iter;
2667 use_operand_p use_p;
2669 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2671 print_generic_expr (file, var, TDF_SLIM);
2672 fprintf (file, " : -->");
2673 if (has_zero_uses (var))
2674 fprintf (file, " no uses.\n");
2675 else
2676 if (has_single_use (var))
2677 fprintf (file, " single use.\n");
2678 else
2679 fprintf (file, "%d uses.\n", num_imm_uses (var));
2681 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2683 if (use_p->stmt == NULL && use_p->use == NULL)
2684 fprintf (file, "***end of stmt iterator marker***\n");
2685 else
2686 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2687 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS|TDF_MEMSYMS);
2688 else
2689 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2691 fprintf(file, "\n");
2695 /* Dump all the immediate uses to FILE. */
2697 void
2698 dump_immediate_uses (FILE *file)
2700 tree var;
2701 unsigned int x;
2703 fprintf (file, "Immediate_uses: \n\n");
2704 for (x = 1; x < num_ssa_names; x++)
2706 var = ssa_name(x);
2707 if (!var)
2708 continue;
2709 dump_immediate_uses_for (file, var);
2714 /* Dump def-use edges on stderr. */
2716 void
2717 debug_immediate_uses (void)
2719 dump_immediate_uses (stderr);
2723 /* Dump def-use edges for VAR on stderr. */
2725 void
2726 debug_immediate_uses_for (tree var)
2728 dump_immediate_uses_for (stderr, var);
2732 /* Create a new change buffer for the statement pointed to by STMT_P and
2733 push the buffer into SCB_STACK. Each change buffer
2734 records state information needed to determine what changed in the
2735 statement. Mainly, this keeps track of symbols that may need to be
2736 put into SSA form, SSA name replacements and other information
2737 needed to keep the SSA form up to date. */
2739 void
2740 push_stmt_changes (tree *stmt_p)
2742 tree stmt;
2743 scb_t buf;
2745 stmt = *stmt_p;
2747 /* It makes no sense to keep track of PHI nodes. */
2748 if (TREE_CODE (stmt) == PHI_NODE)
2749 return;
2751 buf = xmalloc (sizeof *buf);
2752 memset (buf, 0, sizeof *buf);
2754 buf->stmt_p = stmt_p;
2756 if (stmt_references_memory_p (stmt))
2758 tree op;
2759 ssa_op_iter i;
2761 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
2763 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2764 if (buf->loads == NULL)
2765 buf->loads = BITMAP_ALLOC (NULL);
2766 bitmap_set_bit (buf->loads, DECL_UID (sym));
2769 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
2771 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2772 if (buf->stores == NULL)
2773 buf->stores = BITMAP_ALLOC (NULL);
2774 bitmap_set_bit (buf->stores, DECL_UID (sym));
2778 VEC_safe_push (scb_t, heap, scb_stack, buf);
2782 /* Given two sets S1 and S2, mark the symbols that differ in S1 and S2
2783 for renaming. The set to mark for renaming is (S1 & ~S2) | (S2 & ~S1). */
2785 static void
2786 mark_difference_for_renaming (bitmap s1, bitmap s2)
2788 if (s1 == NULL && s2 == NULL)
2789 return;
2791 if (s1 && s2 == NULL)
2792 mark_set_for_renaming (s1);
2793 else if (s1 == NULL && s2)
2794 mark_set_for_renaming (s2);
2795 else if (!bitmap_equal_p (s1, s2))
2797 bitmap t1 = BITMAP_ALLOC (NULL);
2798 bitmap t2 = BITMAP_ALLOC (NULL);
2800 bitmap_and_compl (t1, s1, s2);
2801 bitmap_and_compl (t2, s2, s1);
2802 bitmap_ior_into (t1, t2);
2803 mark_set_for_renaming (t1);
2805 BITMAP_FREE (t1);
2806 BITMAP_FREE (t2);
2811 /* Pop the top SCB from SCB_STACK and act on the differences between
2812 what was recorded by push_stmt_changes and the current state of
2813 the statement. */
2815 void
2816 pop_stmt_changes (tree *stmt_p)
2818 tree op, stmt;
2819 ssa_op_iter iter;
2820 bitmap loads, stores;
2821 scb_t buf;
2823 stmt = *stmt_p;
2825 /* It makes no sense to keep track of PHI nodes. */
2826 if (TREE_CODE (stmt) == PHI_NODE)
2827 return;
2829 buf = VEC_pop (scb_t, scb_stack);
2830 gcc_assert (stmt_p == buf->stmt_p);
2832 /* Force an operand re-scan on the statement and mark any newly
2833 exposed variables. */
2834 update_stmt (stmt);
2836 /* Determine whether any memory symbols need to be renamed. If the
2837 sets of loads and stores are different after the statement is
2838 modified, then the affected symbols need to be renamed.
2840 Note that the statement may no longer reference memory at
2841 all, but we still need to act on the differences in the sets
2842 of symbols. */
2843 loads = stores = NULL;
2844 if (stmt_references_memory_p (stmt))
2846 tree op;
2847 ssa_op_iter i;
2849 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
2851 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2852 if (loads == NULL)
2853 loads = BITMAP_ALLOC (NULL);
2854 bitmap_set_bit (loads, DECL_UID (sym));
2857 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
2859 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2860 if (stores == NULL)
2861 stores = BITMAP_ALLOC (NULL);
2862 bitmap_set_bit (stores, DECL_UID (sym));
2866 /* If LOADS is different from BUF->LOADS, the affected
2867 symbols need to be marked for renaming. */
2868 mark_difference_for_renaming (loads, buf->loads);
2870 /* Similarly for STORES and BUF->STORES. */
2871 mark_difference_for_renaming (stores, buf->stores);
2873 /* Mark all the naked GIMPLE register operands for renaming. */
2874 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
2875 if (DECL_P (op))
2876 mark_sym_for_renaming (op);
2878 /* FIXME, need to add more finalizers here. Cleanup EH info,
2879 recompute invariants for address expressions, add
2880 SSA replacement mappings, etc. For instance, given
2881 testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of
2882 the form:
2884 # SMT.4_20 = VDEF <SMT.4_16>
2885 D.1576_11 = 1.0e+0;
2887 So, the VDEF will disappear, but instead of marking SMT.4 for
2888 renaming it would be far more efficient to establish a
2889 replacement mapping that would replace every reference of
2890 SMT.4_20 with SMT.4_16. */
2892 /* Free memory used by the buffer. */
2893 BITMAP_FREE (buf->loads);
2894 BITMAP_FREE (buf->stores);
2895 BITMAP_FREE (loads);
2896 BITMAP_FREE (stores);
2897 buf->stmt_p = NULL;
2898 free (buf);
2902 /* Discard the topmost change buffer from SCB_STACK. This is useful
2903 when the caller realizes that it did not actually modify the
2904 statement. It avoids the expensive operand re-scan. */
2906 void
2907 discard_stmt_changes (tree *stmt_p)
2909 scb_t buf;
2910 tree stmt;
2912 /* It makes no sense to keep track of PHI nodes. */
2913 stmt = *stmt_p;
2914 if (TREE_CODE (stmt) == PHI_NODE)
2915 return;
2917 buf = VEC_pop (scb_t, scb_stack);
2918 gcc_assert (stmt_p == buf->stmt_p);
2920 /* Free memory used by the buffer. */
2921 BITMAP_FREE (buf->loads);
2922 BITMAP_FREE (buf->stores);
2923 buf->stmt_p = NULL;
2924 free (buf);
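/* Usage sketch: the intended protocol is to bracket a potential statement
   modification with push_stmt_changes and then either pop_stmt_changes
   (the statement was changed) or discard_stmt_changes (it was left
   untouched):

       push_stmt_changes (&stmt);
       if (fold_stmt (&stmt))
         pop_stmt_changes (&stmt);
       else
         discard_stmt_changes (&stmt);

   fold_stmt stands in for whatever transformation the caller applies.  */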
2928 /* Returns true if statement STMT may access memory. */
2930 bool
2931 stmt_references_memory_p (tree stmt)
2933 if (!gimple_ssa_operands (cfun)->ops_active || TREE_CODE (stmt) == PHI_NODE)
2934 return false;
2936 return stmt_ann (stmt)->references_memory;
2940 /* Return the memory partition tag (MPT) associated with memory
2941 symbol SYM. From a correctness standpoint, memory partitions can
2942 be assigned in any arbitrary fashion as long as this rule is
2943 observed: Given two memory partitions MPT.i and MPT.j, they must
2944 not contain symbols in common.
2946 Memory partitions are used when putting the program into Memory-SSA
2947 form. In particular, in Memory-SSA PHI nodes are not computed for
2948 individual memory symbols. They are computed for memory
2949 partitions. This reduces the amount of PHI nodes in the SSA graph
2950 at the expense of precision (i.e., it makes unrelated stores affect
2951 each other).
2953 However, it is possible to increase precision by changing this
2954 partitioning scheme. For instance, if the partitioning scheme is
2955 such that get_mpt_for is the identity function (that is,
2956 get_mpt_for (s) = s), this will result in ultimate precision at the
2957 expense of huge SSA webs.
2959 At the other extreme, a partitioning scheme that groups all the
2960 symbols in the same set results in minimal SSA webs and almost
2961 total loss of precision. */
2963 tree
2964 get_mpt_for (tree sym)
2966 tree mpt;
2968 /* Don't create a new tag unnecessarily. */
2969 mpt = memory_partition (sym);
2970 if (mpt == NULL_TREE)
2972 mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
2973 TREE_ADDRESSABLE (mpt) = 0;
2974 MTAG_GLOBAL (mpt) = 1;
2975 add_referenced_var (mpt);
2976 VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
2977 MPT_SYMBOLS (mpt) = BITMAP_ALLOC (NULL);
2978 set_memory_partition (sym, mpt);
2981 return mpt;
2985 /* Dump memory partition information to FILE. */
2987 void
2988 dump_memory_partitions (FILE *file)
2990 unsigned i, npart;
2991 unsigned long nsyms;
2992 tree mpt;
2994 fprintf (file, "\nMemory partitions\n\n");
2995 for (i = 0, npart = 0, nsyms = 0;
2996 VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
2997 i++)
2999 if (mpt)
3001 bitmap syms = MPT_SYMBOLS (mpt);
3002 unsigned long n = bitmap_count_bits (syms);
3004 fprintf (file, "#%u: ", i);
3005 print_generic_expr (file, mpt, 0);
3006 fprintf (file, ": %lu elements: ", n);
3007 dump_decl_set (file, syms);
3008 npart++;
3009 nsyms += n;
3013 fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
3017 /* Dump memory partition information to stderr. */
3019 void
3020 debug_memory_partitions (void)
3022 dump_memory_partitions (stderr);