1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
43    The operand types are broken up into REAL and VIRTUAL operands.  The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47    variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
53    The operand tree is then parsed by the various get_* routines which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 4 of these routines, each representing one of the
57    4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.
59 The append_* routines check for duplication, and simply keep a list of
60 unique objects for each operand type in the build_* extendable vectors.
62 Once the stmt tree is completely parsed, the finalize_ssa_operands()
63 routine is called, which proceeds to perform the finalization routine
64 on each of the 4 operand vectors which have been built up.
66 If the stmt had a previous operand cache, the finalization routines
67 attempt to match up the new operands with the old ones. If it's a perfect
68 match, the old vector is simply reused. If it isn't a perfect match, then
69 a new vector is created and the new operands are placed there. For
70 virtual operands, if the previous cache had SSA_NAME version of a
71 variable, and that same variable occurs in the same operands cache, then
72 the new cache vector will also get the same SSA_NAME.
74 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
75 vector for VUSE, then the new vector will also be modified such that
76 it contains 'a_5' rather than 'a'. */
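/* As a rough sketch of the flow described above (the statement and the
   variable names here are purely illustrative, not taken from this file):
   parsing a statement such as

       a = b + c;

   calls append_def () on the address of the LHS operand and append_use ()
   on the addresses of 'b' and 'c', while a statement like '*p = x' also
   reaches append_vdef ()/append_vuse () for the symbols the pointer may
   touch.  The finalize_ssa_* routines then turn the build_* vectors into
   the DEF, USE, VDEF and VUSE lists attached to the statement, reusing
   the previous vectors when nothing has changed.  */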
79 /* Structure storing statistics on how many call clobbers we have, and
80    how many were avoided.  */
82 static struct
84 /* Number of call-clobbered ops we attempt to add to calls in
85 add_call_clobbered_mem_symbols. */
86 unsigned int clobbered_vars;
88 /* Number of write-clobbers (VDEFs) avoided by using
89 not_written information. */
90 unsigned int static_write_clobbers_avoided;
92 /* Number of reads (VUSEs) avoided by using not_read information. */
93 unsigned int static_read_clobbers_avoided;
95 /* Number of write-clobbers avoided because the variable can't escape to
96 this call. */
97 unsigned int unescapable_clobbers_avoided;
99 /* Number of read-only uses we attempt to add to calls in
100 add_call_read_mem_symbols. */
101 unsigned int readonly_clobbers;
103 /* Number of read-only uses we avoid using not_read information. */
104 unsigned int static_readonly_clobbers_avoided;
105 } clobber_stats;
108 /* Flags to describe operand properties in helpers. */
110 /* By default, operands are loaded. */
111 #define opf_use 0
113 /* Operand is the target of an assignment expression or a
114 call-clobbered variable. */
115 #define opf_def (1 << 0)
117 /* No virtual operands should be created in the expression. This is used
118 when traversing ADDR_EXPR nodes which have different semantics than
119 other expressions. Inside an ADDR_EXPR node, the only operands that we
120 need to consider are indices into arrays. For instance, &a.b[i] should
121 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
122 VUSE for 'b'. */
123 #define opf_no_vops (1 << 1)
125 /* Operand is an implicit reference. This is used to distinguish
126 explicit assignments in the form of GIMPLE_MODIFY_STMT from
127 clobbering sites like function calls or ASM_EXPRs. */
128 #define opf_implicit (1 << 2)
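/* For example (illustrative only): scanning 'a = *p' visits the RHS with
   opf_use and the LHS with opf_def, an asm clobbering "memory" adds its
   implicit stores with opf_def | opf_implicit, and the operands inside an
   ADDR_EXPR such as '&a.b[i]' are visited with opf_no_vops set so that
   only the index 'i' becomes a real USE.  */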
130 /* Array for building all the def operands. */
131 static VEC(tree,heap) *build_defs;
133 /* Array for building all the use operands. */
134 static VEC(tree,heap) *build_uses;
136 /* Set for building all the VDEF operands. */
137 static VEC(tree,heap) *build_vdefs;
139 /* Set for building all the VUSE operands. */
140 static VEC(tree,heap) *build_vuses;
142 /* Bitmap obstack for our data structures that need to survive across
143    compilations of multiple functions.  */
144 static bitmap_obstack operands_bitmap_obstack;
145 /* Set for building all the loaded symbols. */
146 static bitmap build_loads;
148 /* Set for building all the stored symbols. */
149 static bitmap build_stores;
151 static void get_expr_operands (tree, tree *, int);
153 /* Number of functions with initialized ssa_operands. */
154 static int n_initialized = 0;
156 /* Statement change buffer. Data structure used to record state
157 information for statements. This is used to determine what needs
158 to be done in order to update the SSA web after a statement is
159 modified by a pass. If STMT is a statement that has just been
160 created, or needs to be folded via fold_stmt, or anything that
161    changes its physical structure, then the pass should:
163 1- Call push_stmt_changes (&stmt) to record the current state of
164 STMT before any modifications are made.
166 2- Make all appropriate modifications to the statement.
168 3- Call pop_stmt_changes (&stmt) to find new symbols that
169 need to be put in SSA form, SSA name mappings for names that
170 have disappeared, recompute invariantness for address
171 expressions, cleanup EH information, etc.
173 If it is possible to determine that the statement was not modified,
174 instead of calling pop_stmt_changes it is quicker to call
175 discard_stmt_changes to avoid the expensive and unnecessary operand
176 re-scan and change comparison. */
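/* A minimal usage sketch of this protocol (the surrounding pass code is
   hypothetical):

       push_stmt_changes (&stmt);
       ... transform *stmt ...
       if (stmt_really_changed)
         pop_stmt_changes (&stmt);
       else
         discard_stmt_changes (&stmt);
*/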
178 struct scb_d
180 /* Pointer to the statement being modified. */
181 tree *stmt_p;
183 /* If the statement references memory these are the sets of symbols
184 loaded and stored by the statement. */
185 bitmap loads;
186 bitmap stores;
189 typedef struct scb_d *scb_t;
190 DEF_VEC_P(scb_t);
191 DEF_VEC_ALLOC_P(scb_t,heap);
193 /* Stack of statement change buffers (SCB). Every call to
194 push_stmt_changes pushes a new buffer onto the stack. Calls to
195 pop_stmt_changes pop a buffer off of the stack and compute the set
196 of changes for the popped statement. */
197 static VEC(scb_t,heap) *scb_stack;
199 /* Return the DECL_UID of the base variable of T. */
201 static inline unsigned
202 get_name_decl (tree t)
204 if (TREE_CODE (t) != SSA_NAME)
205 return DECL_UID (t);
206 else
207 return DECL_UID (SSA_NAME_VAR (t));
211 /* Comparison function for qsort used in operand_build_sort_virtual. */
213 static int
214 operand_build_cmp (const void *p, const void *q)
216 tree e1 = *((const tree *)p);
217 tree e2 = *((const tree *)q);
218 unsigned int u1,u2;
220 u1 = get_name_decl (e1);
221 u2 = get_name_decl (e2);
223 /* We want to sort in ascending order. They can never be equal. */
224 #ifdef ENABLE_CHECKING
225 gcc_assert (u1 != u2);
226 #endif
227 return (u1 > u2 ? 1 : -1);
231 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
233 static inline void
234 operand_build_sort_virtual (VEC(tree,heap) *list)
236 int num = VEC_length (tree, list);
238 if (num < 2)
239 return;
241 if (num == 2)
243 if (get_name_decl (VEC_index (tree, list, 0))
244 > get_name_decl (VEC_index (tree, list, 1)))
246 /* Swap elements if in the wrong order. */
247 tree tmp = VEC_index (tree, list, 0);
248 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
249 VEC_replace (tree, list, 1, tmp);
251 return;
254 /* There are 3 or more elements, call qsort. */
255 qsort (VEC_address (tree, list),
256 VEC_length (tree, list),
257 sizeof (tree),
258 operand_build_cmp);
262 /* Return true if the SSA operands cache is active. */
264 bool
265 ssa_operands_active (void)
267 return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
271 /* VOPs are of variable size, so the free list maps "free buckets" to the
272    following table:
273       bucket   # operands
274       ------   ----------
275          0           1
276          1           2
277            ...
278         15          16
279         16       17-24
280         17       25-32
281         18       33-40
282            ...
283         29     121-128
284 Any VOPs larger than this are simply added to the largest bucket when they
285 are freed. */
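/* Worked example (following vop_free_bucket_index/vop_free_bucket_size
   below): a VOP with 20 operands lands in bucket 14 + (20 - 1) / 8 = 16,
   and a VOP reused from bucket 16 has room for (16 - 13) * 8 = 24
   operands, matching the 17-24 row of the table above.  */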
288 /* Return the number of operands used in bucket BUCKET. */
290 static inline int
291 vop_free_bucket_size (int bucket)
293 #ifdef ENABLE_CHECKING
294 gcc_assert (bucket >= 0 && bucket < NUM_VOP_FREE_BUCKETS);
295 #endif
296 if (bucket < 16)
297 return bucket + 1;
298 return (bucket - 13) * 8;
302 /* For a vop of NUM operands, return the bucket NUM belongs to. If NUM is
303 beyond the end of the bucket table, return -1. */
305 static inline int
306 vop_free_bucket_index (int num)
308 gcc_assert (num > 0 && NUM_VOP_FREE_BUCKETS > 16);
310 /* Sizes 1 through 16 use buckets 0-15. */
311 if (num <= 16)
312 return num - 1;
313 /* Buckets 16 - NUM_VOP_FREE_BUCKETS represent 8 unit chunks. */
314 num = 14 + (num - 1) / 8;
315 if (num >= NUM_VOP_FREE_BUCKETS)
316 return -1;
317 else
318 return num;
322 /* Initialize the VOP free buckets. */
324 static inline void
325 init_vop_buckets (void)
327 int x;
329 for (x = 0; x < NUM_VOP_FREE_BUCKETS; x++)
330 gimple_ssa_operands (cfun)->vop_free_buckets[x] = NULL;
334 /* Add PTR to the appropriate VOP bucket. */
336 static inline void
337 add_vop_to_freelist (voptype_p ptr)
339 int bucket = vop_free_bucket_index (VUSE_VECT_NUM_ELEM (ptr->usev));
341   /* Too large, use the largest bucket so it's not a complete throwaway.  */
342 if (bucket == -1)
343 bucket = NUM_VOP_FREE_BUCKETS - 1;
345 ptr->next = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
346 gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = ptr;
350 /* These are the sizes of the operand memory buffer which gets allocated each
351    time more operand space is required.  The final value is the amount that is
352    allocated every time after that.  */
354 #define OP_SIZE_INIT 0
355 #define OP_SIZE_1 30
356 #define OP_SIZE_2 110
357 #define OP_SIZE_3 511
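/* Illustrative note: ssa_operand_alloc () below steps the chunk size
   through these values, so successive operand memory chunks provide room
   for roughly 30, then 110, then 511 voptype_d-sized objects, and every
   chunk after that stays at the OP_SIZE_3 size.  */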
359 /* Initialize the operand cache routines. */
361 void
362 init_ssa_operands (void)
364 if (!n_initialized++)
366 build_defs = VEC_alloc (tree, heap, 5);
367 build_uses = VEC_alloc (tree, heap, 10);
368 build_vuses = VEC_alloc (tree, heap, 25);
369 build_vdefs = VEC_alloc (tree, heap, 25);
370 bitmap_obstack_initialize (&operands_bitmap_obstack);
371 build_loads = BITMAP_ALLOC (&operands_bitmap_obstack);
372 build_stores = BITMAP_ALLOC (&operands_bitmap_obstack);
373 scb_stack = VEC_alloc (scb_t, heap, 20);
376 gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
377 gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
378 gimple_ssa_operands (cfun)->operand_memory_index
379 = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
380 gimple_ssa_operands (cfun)->ops_active = true;
381 memset (&clobber_stats, 0, sizeof (clobber_stats));
382 init_vop_buckets ();
383 gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
387 /* Dispose of anything required by the operand routines. */
389 void
390 fini_ssa_operands (void)
392 struct ssa_operand_memory_d *ptr;
393 unsigned ix;
394 tree mpt;
396 if (!--n_initialized)
398 VEC_free (tree, heap, build_defs);
399 VEC_free (tree, heap, build_uses);
400 VEC_free (tree, heap, build_vdefs);
401 VEC_free (tree, heap, build_vuses);
402 BITMAP_FREE (build_loads);
403 BITMAP_FREE (build_stores);
405 /* The change buffer stack had better be empty. */
406 gcc_assert (VEC_length (scb_t, scb_stack) == 0);
407 VEC_free (scb_t, heap, scb_stack);
408 scb_stack = NULL;
411 gimple_ssa_operands (cfun)->free_defs = NULL;
412 gimple_ssa_operands (cfun)->free_uses = NULL;
414 while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
416 gimple_ssa_operands (cfun)->operand_memory
417 = gimple_ssa_operands (cfun)->operand_memory->next;
418 ggc_free (ptr);
421 for (ix = 0;
422 VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt);
423 ix++)
425 if (mpt)
426 BITMAP_FREE (MPT_SYMBOLS (mpt));
429 VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table);
431 gimple_ssa_operands (cfun)->ops_active = false;
433 if (!n_initialized)
434 bitmap_obstack_release (&operands_bitmap_obstack);
435 if (dump_file && (dump_flags & TDF_STATS))
437 fprintf (dump_file, "Original clobbered vars: %d\n",
438 clobber_stats.clobbered_vars);
439 fprintf (dump_file, "Static write clobbers avoided: %d\n",
440 clobber_stats.static_write_clobbers_avoided);
441 fprintf (dump_file, "Static read clobbers avoided: %d\n",
442 clobber_stats.static_read_clobbers_avoided);
443 fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
444 clobber_stats.unescapable_clobbers_avoided);
445 fprintf (dump_file, "Original read-only clobbers: %d\n",
446 clobber_stats.readonly_clobbers);
447 fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
448 clobber_stats.static_readonly_clobbers_avoided);
453 /* Return memory for operands of SIZE chunks. */
455 static inline void *
456 ssa_operand_alloc (unsigned size)
458 char *ptr;
460 if (gimple_ssa_operands (cfun)->operand_memory_index + size
461 >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
463 struct ssa_operand_memory_d *ptr;
465 if (gimple_ssa_operands (cfun)->ssa_operand_mem_size == OP_SIZE_INIT)
466 gimple_ssa_operands (cfun)->ssa_operand_mem_size
467 = OP_SIZE_1 * sizeof (struct voptype_d);
468 else
469 if (gimple_ssa_operands (cfun)->ssa_operand_mem_size
470 == OP_SIZE_1 * sizeof (struct voptype_d))
471 gimple_ssa_operands (cfun)->ssa_operand_mem_size
472 = OP_SIZE_2 * sizeof (struct voptype_d);
473 else
474 gimple_ssa_operands (cfun)->ssa_operand_mem_size
475 = OP_SIZE_3 * sizeof (struct voptype_d);
477 /* Go right to the maximum size if the request is too large. */
478 if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size)
479 gimple_ssa_operands (cfun)->ssa_operand_mem_size
480 = OP_SIZE_3 * sizeof (struct voptype_d);
482 /* Fail if there is not enough space. If there are this many operands
483 required, first make sure there isn't a different problem causing this
484 many operands. If the decision is that this is OK, then we can
485 specially allocate a buffer just for this request. */
486 gcc_assert (size <= gimple_ssa_operands (cfun)->ssa_operand_mem_size);
488 ptr = (struct ssa_operand_memory_d *)
489 ggc_alloc (sizeof (struct ssa_operand_memory_d)
490 + gimple_ssa_operands (cfun)->ssa_operand_mem_size - 1);
491 ptr->next = gimple_ssa_operands (cfun)->operand_memory;
492 gimple_ssa_operands (cfun)->operand_memory = ptr;
493 gimple_ssa_operands (cfun)->operand_memory_index = 0;
495 ptr = &(gimple_ssa_operands (cfun)->operand_memory
496 ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
497 gimple_ssa_operands (cfun)->operand_memory_index += size;
498 return ptr;
502 /* Allocate a DEF operand. */
504 static inline struct def_optype_d *
505 alloc_def (void)
507 struct def_optype_d *ret;
508 if (gimple_ssa_operands (cfun)->free_defs)
510 ret = gimple_ssa_operands (cfun)->free_defs;
511 gimple_ssa_operands (cfun)->free_defs
512 = gimple_ssa_operands (cfun)->free_defs->next;
514 else
515 ret = (struct def_optype_d *)
516 ssa_operand_alloc (sizeof (struct def_optype_d));
517 return ret;
521 /* Allocate a USE operand. */
523 static inline struct use_optype_d *
524 alloc_use (void)
526 struct use_optype_d *ret;
527 if (gimple_ssa_operands (cfun)->free_uses)
529 ret = gimple_ssa_operands (cfun)->free_uses;
530 gimple_ssa_operands (cfun)->free_uses
531 = gimple_ssa_operands (cfun)->free_uses->next;
533 else
534 ret = (struct use_optype_d *)
535 ssa_operand_alloc (sizeof (struct use_optype_d));
536 return ret;
540 /* Allocate a vop with NUM elements. */
542 static inline struct voptype_d *
543 alloc_vop (int num)
545 struct voptype_d *ret = NULL;
546 int alloc_size = 0;
548 int bucket = vop_free_bucket_index (num);
549 if (bucket != -1)
551 /* If there is a free operand, use it. */
552 if (gimple_ssa_operands (cfun)->vop_free_buckets[bucket] != NULL)
554 ret = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
555 gimple_ssa_operands (cfun)->vop_free_buckets[bucket] =
556 gimple_ssa_operands (cfun)->vop_free_buckets[bucket]->next;
558 else
559 alloc_size = vop_free_bucket_size(bucket);
561 else
562 alloc_size = num;
564 if (alloc_size > 0)
565 ret = (struct voptype_d *)ssa_operand_alloc (
566 sizeof (struct voptype_d) + (alloc_size - 1) * sizeof (vuse_element_t));
568 VUSE_VECT_NUM_ELEM (ret->usev) = num;
569 return ret;
573 /* This routine makes sure that PTR is in an immediate use list, and makes
574 sure the stmt pointer is set to the current stmt. */
576 static inline void
577 set_virtual_use_link (use_operand_p ptr, tree stmt)
579 /* fold_stmt may have changed the stmt pointers. */
580 if (ptr->stmt != stmt)
581 ptr->stmt = stmt;
583 /* If this use isn't in a list, add it to the correct list. */
584 if (!ptr->prev)
585 link_imm_use (ptr, *(ptr->use));
589 /* Adds OP to the list of defs after LAST. */
591 static inline def_optype_p
592 add_def_op (tree *op, def_optype_p last)
594 def_optype_p new;
596 new = alloc_def ();
597 DEF_OP_PTR (new) = op;
598 last->next = new;
599 new->next = NULL;
600 return new;
604 /* Adds OP to the list of uses of statement STMT after LAST. */
606 static inline use_optype_p
607 add_use_op (tree stmt, tree *op, use_optype_p last)
609 use_optype_p new;
611 new = alloc_use ();
612 USE_OP_PTR (new)->use = op;
613 link_imm_use_stmt (USE_OP_PTR (new), *op, stmt);
614 last->next = new;
615 new->next = NULL;
616 return new;
620 /* Return a virtual op pointer with NUM elements which are all initialized to OP
621 and are linked into the immediate uses for STMT. The new vop is appended
622 after PREV. */
624 static inline voptype_p
625 add_vop (tree stmt, tree op, int num, voptype_p prev)
627 voptype_p new;
628 int x;
630 new = alloc_vop (num);
631 for (x = 0; x < num; x++)
633 VUSE_OP_PTR (new, x)->prev = NULL;
634 SET_VUSE_OP (new, x, op);
635 VUSE_OP_PTR (new, x)->use = &new->usev.uses[x].use_var;
636 link_imm_use_stmt (VUSE_OP_PTR (new, x), new->usev.uses[x].use_var, stmt);
639 if (prev)
640 prev->next = new;
641 new->next = NULL;
642 return new;
646 /* Adds OP to the list of vuses of statement STMT after LAST, and moves
647 LAST to the new element. */
649 static inline voptype_p
650 add_vuse_op (tree stmt, tree op, int num, voptype_p last)
652 voptype_p new = add_vop (stmt, op, num, last);
653 VDEF_RESULT (new) = NULL_TREE;
654 return new;
658 /* Adds OP to the list of vdefs of statement STMT after LAST, and moves
659 LAST to the new element. */
661 static inline voptype_p
662 add_vdef_op (tree stmt, tree op, int num, voptype_p last)
664 voptype_p new = add_vop (stmt, op, num, last);
665 VDEF_RESULT (new) = op;
666 return new;
670 /* Reallocate the virtual operand PTR so that it has NUM_ELEM use slots. ROOT
671 is the head of the operand list it belongs to. */
673 static inline struct voptype_d *
674 realloc_vop (struct voptype_d *ptr, unsigned int num_elem,
675 struct voptype_d **root)
677 unsigned int x, lim;
678 tree stmt, val;
679 struct voptype_d *ret, *tmp;
681 if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
682 return ptr;
684 val = VUSE_OP (ptr, 0);
685 if (TREE_CODE (val) == SSA_NAME)
686 val = SSA_NAME_VAR (val);
688 stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
690 /* Delink all the existing uses. */
691 for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
693 use_operand_p use_p = VUSE_OP_PTR (ptr, x);
694 delink_imm_use (use_p);
697 /* If we want less space, simply use this one, and shrink the size. */
698 if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem)
700 VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem;
701 return ptr;
704 /* It is growing. Allocate a new one and replace the old one. */
705 ret = add_vuse_op (stmt, val, num_elem, ptr);
707 /* Clear PTR and add its memory to the free list. */
708 lim = VUSE_VECT_NUM_ELEM (ptr->usev);
709 memset (ptr, 0,
710    sizeof (struct voptype_d) + sizeof (vuse_element_t) * (lim - 1));
711 add_vop_to_freelist (ptr);
713 /* Now simply remove the old one. */
714 if (*root == ptr)
716 *root = ret;
717 return ret;
719 else
720 for (tmp = *root;
721 tmp != NULL && tmp->next != ptr;
722 tmp = tmp->next)
724 tmp->next = ret;
725 return ret;
728 /* The pointer passed in isn't in STMT's VDEF lists. */
729 gcc_unreachable ();
733 /* Reallocate the PTR vdef so that it has NUM_ELEM use slots. */
735 struct voptype_d *
736 realloc_vdef (struct voptype_d *ptr, unsigned int num_elem)
738 tree val, stmt;
739 struct voptype_d *ret;
741 val = VDEF_RESULT (ptr);
742 stmt = USE_STMT (VDEF_OP_PTR (ptr, 0));
743 ret = realloc_vop (ptr, num_elem, &(VDEF_OPS (stmt)));
744 VDEF_RESULT (ret) = val;
745 return ret;
749 /* Reallocate the PTR vuse so that it has NUM_ELEM use slots. */
751 struct voptype_d *
752 realloc_vuse (struct voptype_d *ptr, unsigned int num_elem)
754 tree stmt;
755 struct voptype_d *ret;
757 stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
758 ret = realloc_vop (ptr, num_elem, &(VUSE_OPS (stmt)));
759 return ret;
763 /* Takes elements from build_defs and turns them into def operands of STMT.
764 TODO -- Make build_defs VEC of tree *. */
766 static inline void
767 finalize_ssa_defs (tree stmt)
769 unsigned new_i;
770 struct def_optype_d new_list;
771 def_optype_p old_ops, last;
772 unsigned int num = VEC_length (tree, build_defs);
774 /* There should only be a single real definition per assignment. */
775 gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);
777 new_list.next = NULL;
778 last = &new_list;
780 old_ops = DEF_OPS (stmt);
782 new_i = 0;
784 /* Check for the common case of 1 def that hasn't changed. */
785 if (old_ops && old_ops->next == NULL && num == 1
786 && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
787 return;
789 /* If there is anything in the old list, free it. */
790 if (old_ops)
792 old_ops->next = gimple_ssa_operands (cfun)->free_defs;
793 gimple_ssa_operands (cfun)->free_defs = old_ops;
796 /* If there is anything remaining in the build_defs list, simply emit it. */
797 for ( ; new_i < num; new_i++)
798 last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);
800 /* Now set the stmt's operands. */
801 DEF_OPS (stmt) = new_list.next;
803 #ifdef ENABLE_CHECKING
805 def_optype_p ptr;
806 unsigned x = 0;
807 for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
808 x++;
810 gcc_assert (x == num);
812 #endif
816 /* Takes elements from build_uses and turns them into use operands of STMT.
817 TODO -- Make build_uses VEC of tree *. */
819 static inline void
820 finalize_ssa_uses (tree stmt)
822 unsigned new_i;
823 struct use_optype_d new_list;
824 use_optype_p old_ops, ptr, last;
826 #ifdef ENABLE_CHECKING
828 unsigned x;
829 unsigned num = VEC_length (tree, build_uses);
831 /* If the pointer to the operand is the statement itself, something is
832 wrong. It means that we are pointing to a local variable (the
833 initial call to update_stmt_operands does not pass a pointer to a
834 statement). */
835 for (x = 0; x < num; x++)
836 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
838 #endif
840 new_list.next = NULL;
841 last = &new_list;
843 old_ops = USE_OPS (stmt);
845 /* If there is anything in the old list, free it. */
846 if (old_ops)
848 for (ptr = old_ops; ptr; ptr = ptr->next)
849 delink_imm_use (USE_OP_PTR (ptr));
850 old_ops->next = gimple_ssa_operands (cfun)->free_uses;
851 gimple_ssa_operands (cfun)->free_uses = old_ops;
854 /* Now create nodes for all the new nodes. */
855 for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
856 last = add_use_op (stmt,
857 (tree *) VEC_index (tree, build_uses, new_i),
858 last);
860 /* Now set the stmt's operands. */
861 USE_OPS (stmt) = new_list.next;
863 #ifdef ENABLE_CHECKING
865 unsigned x = 0;
866 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
867 x++;
869 gcc_assert (x == VEC_length (tree, build_uses));
871 #endif
875 /* Takes elements from BUILD_VDEFS and turns them into vdef operands of
876 STMT. FIXME, for now VDEF operators should have a single operand
877 in their RHS. */
879 static inline void
880 finalize_ssa_vdefs (tree stmt)
882 unsigned new_i;
883 struct voptype_d new_list;
884 voptype_p old_ops, ptr, last;
885 stmt_ann_t ann = stmt_ann (stmt);
887 /* Set the symbols referenced by STMT. */
888 if (!bitmap_empty_p (build_stores))
890 if (ann->operands.stores == NULL)
891 ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);
893 bitmap_copy (ann->operands.stores, build_stores);
895 else
896 BITMAP_FREE (ann->operands.stores);
898 /* If aliases have not been computed, do not instantiate a virtual
899 operator on STMT. Initially, we only compute the SSA form on
900 GIMPLE registers. The virtual SSA form is only computed after
901 alias analysis, so virtual operators will remain unrenamed and
902 the verifier will complain. However, alias analysis needs to
903 access symbol load/store information, so we need to compute
904 those. */
905 if (!gimple_aliases_computed_p (cfun))
906 return;
908 new_list.next = NULL;
909 last = &new_list;
911 old_ops = VDEF_OPS (stmt);
912 new_i = 0;
913 while (old_ops && new_i < VEC_length (tree, build_vdefs))
915 tree op = VEC_index (tree, build_vdefs, new_i);
916 unsigned new_uid = get_name_decl (op);
917 unsigned old_uid = get_name_decl (VDEF_RESULT (old_ops));
919 /* FIXME, for now each VDEF operator should have at most one
920 operand in their RHS. */
921 gcc_assert (VDEF_NUM (old_ops) == 1);
923 if (old_uid == new_uid)
925 /* If the symbols are the same, reuse the existing operand. */
926 last->next = old_ops;
927 last = old_ops;
928 old_ops = old_ops->next;
929 last->next = NULL;
930 set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt);
931 new_i++;
933 else if (old_uid < new_uid)
935 /* If old is less than new, old goes to the free list. */
936 voptype_p next;
937 delink_imm_use (VDEF_OP_PTR (old_ops, 0));
938 next = old_ops->next;
939 add_vop_to_freelist (old_ops);
940 old_ops = next;
942 else
944 /* This is a new operand. */
945 last = add_vdef_op (stmt, op, 1, last);
946 new_i++;
950 /* If there is anything remaining in BUILD_VDEFS, simply emit it. */
951 for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
952 last = add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, last);
954 /* If there is anything in the old list, free it. */
955 if (old_ops)
957 for (ptr = old_ops; ptr; ptr = last)
959 last = ptr->next;
960 delink_imm_use (VDEF_OP_PTR (ptr, 0));
961 add_vop_to_freelist (ptr);
965 /* Now set STMT's operands. */
966 VDEF_OPS (stmt) = new_list.next;
968 #ifdef ENABLE_CHECKING
970 unsigned x = 0;
971 for (ptr = VDEF_OPS (stmt); ptr; ptr = ptr->next)
972 x++;
974 gcc_assert (x == VEC_length (tree, build_vdefs));
976 #endif
980 /* Takes elements from BUILD_VUSES and turns them into VUSE operands of
981 STMT. */
983 static inline void
984 finalize_ssa_vuse_ops (tree stmt)
986 unsigned new_i, old_i;
987 voptype_p old_ops, last;
988 VEC(tree,heap) *new_ops;
989 stmt_ann_t ann;
991 /* Set the symbols referenced by STMT. */
992 ann = stmt_ann (stmt);
993 if (!bitmap_empty_p (build_loads))
995 if (ann->operands.loads == NULL)
996 ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);
998 bitmap_copy (ann->operands.loads, build_loads);
1000 else
1001 BITMAP_FREE (ann->operands.loads);
1003 /* If aliases have not been computed, do not instantiate a virtual
1004 operator on STMT. Initially, we only compute the SSA form on
1005 GIMPLE registers. The virtual SSA form is only computed after
1006 alias analysis, so virtual operators will remain unrenamed and
1007 the verifier will complain. However, alias analysis needs to
1008 access symbol load/store information, so we need to compute
1009 those. */
1010 if (!gimple_aliases_computed_p (cfun))
1011 return;
1013 /* STMT should have at most one VUSE operator. */
1014 old_ops = VUSE_OPS (stmt);
1015 gcc_assert (old_ops == NULL || old_ops->next == NULL);
1017 new_ops = NULL;
1018 new_i = old_i = 0;
1019 while (old_ops
1020 && old_i < VUSE_NUM (old_ops)
1021 && new_i < VEC_length (tree, build_vuses))
1023 tree new_op = VEC_index (tree, build_vuses, new_i);
1024 tree old_op = VUSE_OP (old_ops, old_i);
1025 unsigned new_uid = get_name_decl (new_op);
1026 unsigned old_uid = get_name_decl (old_op);
1028 if (old_uid == new_uid)
1030 /* If the symbols are the same, reuse the existing operand. */
1031 VEC_safe_push (tree, heap, new_ops, old_op);
1032 new_i++;
1033 old_i++;
1035 else if (old_uid < new_uid)
1037 /* If OLD_UID is less than NEW_UID, the old operand has
1038 disappeared, skip to the next old operand. */
1039 old_i++;
1041 else
1043 /* This is a new operand. */
1044 VEC_safe_push (tree, heap, new_ops, new_op);
1045 new_i++;
1049 /* If there is anything remaining in the build_vuses list, simply emit it. */
1050 for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
1051 VEC_safe_push (tree, heap, new_ops, VEC_index (tree, build_vuses, new_i));
1053 /* If there is anything in the old list, free it. */
1054 if (old_ops)
1056 for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
1057 delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
1058 add_vop_to_freelist (old_ops);
1059 VUSE_OPS (stmt) = NULL;
1062 /* If there are any operands, instantiate a VUSE operator for STMT. */
1063 if (new_ops)
1065 tree op;
1066 unsigned i;
1068 last = add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), NULL);
1070 for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
1071 SET_USE (VUSE_OP_PTR (last, (int) i), op);
1073 VUSE_OPS (stmt) = last;
1076 #ifdef ENABLE_CHECKING
1078 unsigned x;
1080 if (VUSE_OPS (stmt))
1082 gcc_assert (VUSE_OPS (stmt)->next == NULL);
1083 x = VUSE_NUM (VUSE_OPS (stmt));
1085 else
1086 x = 0;
1088 gcc_assert (x == VEC_length (tree, build_vuses));
1090 #endif
1093 /* Return a new VUSE operand vector for STMT. */
1095 static void
1096 finalize_ssa_vuses (tree stmt)
1098 unsigned num, num_vdefs;
1099 unsigned vuse_index;
1101 /* Remove superfluous VUSE operands. If the statement already has a
1102 VDEF operator for a variable 'a', then a VUSE for 'a' is not
1103 needed because VDEFs imply a VUSE of the variable. For instance,
1104 suppose that variable 'a' is pointed-to by p and q:
1106 # VUSE <a_2>
1107 # a_3 = VDEF <a_2>
1108 *p = *q;
1110 The VUSE <a_2> is superfluous because it is implied by the
1111 VDEF operator. */
1112 num = VEC_length (tree, build_vuses);
1113 num_vdefs = VEC_length (tree, build_vdefs);
1115 if (num > 0 && num_vdefs > 0)
1116 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
1118 tree vuse;
1119 vuse = VEC_index (tree, build_vuses, vuse_index);
1120 if (TREE_CODE (vuse) != SSA_NAME)
1122 var_ann_t ann = var_ann (vuse);
1123 ann->in_vuse_list = 0;
1124 if (ann->in_vdef_list)
1126 VEC_ordered_remove (tree, build_vuses, vuse_index);
1127 continue;
1130 vuse_index++;
1133 finalize_ssa_vuse_ops (stmt);
1137 /* Clear the in_list bits and empty the build array for VDEFs and
1138 VUSEs. */
1140 static inline void
1141 cleanup_build_arrays (void)
1143 unsigned i;
1144 tree t;
1146 for (i = 0; VEC_iterate (tree, build_vdefs, i, t); i++)
1147 if (TREE_CODE (t) != SSA_NAME)
1148 var_ann (t)->in_vdef_list = false;
1150 for (i = 0; VEC_iterate (tree, build_vuses, i, t); i++)
1151 if (TREE_CODE (t) != SSA_NAME)
1152 var_ann (t)->in_vuse_list = false;
1154 VEC_truncate (tree, build_vdefs, 0);
1155 VEC_truncate (tree, build_vuses, 0);
1156 VEC_truncate (tree, build_defs, 0);
1157 VEC_truncate (tree, build_uses, 0);
1158 bitmap_clear (build_loads);
1159 bitmap_clear (build_stores);
1163 /* Finalize all the build vectors, fill the new ones into INFO. */
1165 static inline void
1166 finalize_ssa_stmt_operands (tree stmt)
1168 finalize_ssa_defs (stmt);
1169 finalize_ssa_uses (stmt);
1170 finalize_ssa_vdefs (stmt);
1171 finalize_ssa_vuses (stmt);
1172 cleanup_build_arrays ();
1176 /* Start the process of building up operands vectors in INFO. */
1178 static inline void
1179 start_ssa_stmt_operands (void)
1181 gcc_assert (VEC_length (tree, build_defs) == 0);
1182 gcc_assert (VEC_length (tree, build_uses) == 0);
1183 gcc_assert (VEC_length (tree, build_vuses) == 0);
1184 gcc_assert (VEC_length (tree, build_vdefs) == 0);
1185 gcc_assert (bitmap_empty_p (build_loads));
1186 gcc_assert (bitmap_empty_p (build_stores));
1190 /* Add DEF_P to the list of pointers to operands. */
1192 static inline void
1193 append_def (tree *def_p)
1195 VEC_safe_push (tree, heap, build_defs, (tree) def_p);
1199 /* Add USE_P to the list of pointers to operands. */
1201 static inline void
1202 append_use (tree *use_p)
1204 VEC_safe_push (tree, heap, build_uses, (tree) use_p);
1208 /* Add VAR to the set of variables that require a VDEF operator. */
1210 static inline void
1211 append_vdef (tree var)
1213 tree sym;
1215 if (TREE_CODE (var) != SSA_NAME)
1217 tree mpt;
1218 var_ann_t ann;
1220 /* If VAR belongs to a memory partition, use it instead of VAR. */
1221 mpt = memory_partition (var);
1222 if (mpt)
1223 var = mpt;
1225 /* Don't allow duplicate entries. */
1226 ann = get_var_ann (var);
1227 if (ann->in_vdef_list)
1228 return;
1230 ann->in_vdef_list = true;
1231 sym = var;
1233 else
1234 sym = SSA_NAME_VAR (var);
1236 VEC_safe_push (tree, heap, build_vdefs, var);
1237 bitmap_set_bit (build_stores, DECL_UID (sym));
1241 /* Add VAR to the set of variables that require a VUSE operator. */
1243 static inline void
1244 append_vuse (tree var)
1246 tree sym;
1248 if (TREE_CODE (var) != SSA_NAME)
1250 tree mpt;
1251 var_ann_t ann;
1253 /* If VAR belongs to a memory partition, use it instead of VAR. */
1254 mpt = memory_partition (var);
1255 if (mpt)
1256 var = mpt;
1258 /* Don't allow duplicate entries. */
1259 ann = get_var_ann (var);
1260 if (ann->in_vuse_list || ann->in_vdef_list)
1261 return;
1263 ann->in_vuse_list = true;
1264 sym = var;
1266 else
1267 sym = SSA_NAME_VAR (var);
1269 VEC_safe_push (tree, heap, build_vuses, var);
1270 bitmap_set_bit (build_loads, DECL_UID (sym));
1274 /* REF is a tree that contains the entire pointer dereference
1275 expression, if available, or NULL otherwise. ALIAS is the variable
1276 we are asking if REF can access. OFFSET and SIZE come from the
1277 memory access expression that generated this virtual operand. */
1279 static bool
1280 access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
1281 HOST_WIDE_INT size)
1283 bool offsetgtz = offset > 0;
1284 unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
1285 tree base = ref ? get_base_address (ref) : NULL;
1287 /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
1288 using a call-clobbered memory tag. By definition, call-clobbered
1289 memory tags can always touch .GLOBAL_VAR. */
1290 if (alias == gimple_global_var (cfun))
1291 return true;
1293 /* If ALIAS is an SFT, it can't be touched if the offset
1294 and size of the access is not overlapping with the SFT offset and
1295 size. This is only true if we are accessing through a pointer
1296 to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
1297 be accessing through a pointer to some substruct of the
1298 structure, and if we try to prune there, we will have the wrong
1299 offset, and get the wrong answer.
1300 i.e., we can't prune without more work if we have something like
1302 struct gcc_target
1304 struct asm_out
1306 const char *byte_op;
1307 struct asm_int_op
1309 const char *hi;
1310 } aligned_op;
1311 } asm_out;
1312 } targetm;
1314 foo = &targetm.asm_out.aligned_op;
1315 return foo->hi;
1317 SFT.1, which represents hi, will have SFT_OFFSET=32 because in
1318 terms of SFT_PARENT_VAR, that is where it is.
1319 However, the access through the foo pointer will be at offset 0. */
1320 if (size != -1
1321 && TREE_CODE (alias) == STRUCT_FIELD_TAG
1322 && base
1323 && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
1324 && !overlap_subvar (offset, size, alias, NULL))
1326 #ifdef ACCESS_DEBUGGING
1327 fprintf (stderr, "Access to ");
1328 print_generic_expr (stderr, ref, 0);
1329 fprintf (stderr, " may not touch ");
1330 print_generic_expr (stderr, alias, 0);
1331 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1332 #endif
1333 return false;
1336 /* Without strict aliasing, it is impossible for a component access
1337 through a pointer to touch a random variable, unless that
1338 variable *is* a structure or a pointer.
1340 That is, given p->c, and some random global variable b,
1341 there is no legal way that p->c could be an access to b.
1343 Without strict aliasing on, we consider it legal to do something
1344 like:
1346 struct foos { int l; };
1347 int foo;
1348 static struct foos *getfoo(void);
1349 int main (void)
1351 struct foos *f = getfoo();
1352 f->l = 1;
1353 foo = 2;
1354 if (f->l == 1)
1355 abort();
1356 exit(0);
1358 static struct foos *getfoo(void)
1359 { return (struct foos *)&foo; }
1361 (taken from 20000623-1.c)
1363 The docs also say/imply that access through union pointers
1364 is legal (but *not* if you take the address of the union member,
1365 i.e. the inverse), such that you can do
1367 typedef union {
1368 int d;
1369 } U;
1371 int rv;
1372 void breakme()
1374 U *rv0;
1375 U *pretmp = (U*)&rv;
1376 rv0 = pretmp;
1377 rv0->d = 42;
1379 To implement this, we just punt on accesses through union
1380      pointers entirely.  */
1382 else if (ref
1383 && flag_strict_aliasing
1384 && TREE_CODE (ref) != INDIRECT_REF
1385 && !MTAG_P (alias)
1386 && (TREE_CODE (base) != INDIRECT_REF
1387 || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
1388 && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
1389 && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
1390 && !var_ann (alias)->is_heapvar
1391       /* When the struct has may_alias attached to it, we need not
1392          return true.  */
1393 && get_alias_set (base))
1395 #ifdef ACCESS_DEBUGGING
1396 fprintf (stderr, "Access to ");
1397 print_generic_expr (stderr, ref, 0);
1398 fprintf (stderr, " may not touch ");
1399 print_generic_expr (stderr, alias, 0);
1400 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1401 #endif
1402 return false;
1405 /* If the offset of the access is greater than the size of one of
1406 the possible aliases, it can't be touching that alias, because it
1407 would be past the end of the structure. */
1408 else if (ref
1409 && flag_strict_aliasing
1410 && TREE_CODE (ref) != INDIRECT_REF
1411 && !MTAG_P (alias)
1412 && !POINTER_TYPE_P (TREE_TYPE (alias))
1413 && offsetgtz
1414 && DECL_SIZE (alias)
1415 && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
1416 && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
1418 #ifdef ACCESS_DEBUGGING
1419 fprintf (stderr, "Access to ");
1420 print_generic_expr (stderr, ref, 0);
1421 fprintf (stderr, " may not touch ");
1422 print_generic_expr (stderr, alias, 0);
1423 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1424 #endif
1425 return false;
1428 return true;
1432 /* Add VAR to the virtual operands array. FLAGS is as in
1433 get_expr_operands. FULL_REF is a tree that contains the entire
1434 pointer dereference expression, if available, or NULL otherwise.
1435 OFFSET and SIZE come from the memory access expression that
1436    generated this virtual operand.  FOR_CLOBBER is true if this is
1437 adding a virtual operand for a call clobber. */
1439 static void
1440 add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
1441 tree full_ref, HOST_WIDE_INT offset,
1442 HOST_WIDE_INT size, bool for_clobber)
1444 bitmap aliases = NULL;
1445 tree sym;
1446 var_ann_t v_ann;
1448 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1449 v_ann = var_ann (sym);
1451 /* Mark the statement as having memory operands. */
1452 s_ann->references_memory = true;
1454 /* Mark statements with volatile operands. Optimizers should back
1455 off from statements having volatile operands. */
1456 if (TREE_THIS_VOLATILE (sym) && s_ann)
1457 s_ann->has_volatile_ops = true;
1459   /* If the variable cannot be modified and this is a VDEF, change
1460      it into a VUSE.  This happens when read-only variables are marked
1461      call-clobbered and/or aliased to writable variables.  So we only
1462      make this change for non-specific stores.
1464 Note that if this is a specific store, i.e. associated with a
1465 GIMPLE_MODIFY_STMT, then we can't suppress the VDEF, lest we run
1466 into validation problems.
1468 This can happen when programs cast away const, leaving us with a
1469 store to read-only memory. If the statement is actually executed
1470 at runtime, then the program is ill formed. If the statement is
1471 not executed then all is well. At the very least, we cannot ICE. */
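  /* For illustration only (hypothetical user code): if a read-only
     variable such as

         static const int ro;

     has been marked call-clobbered because its address escapes, the
     implicit VDEF it would receive at a call site is downgraded to a
     VUSE by the check below, whereas an explicit store through
     '*(int *) &ro = 1' keeps its VDEF.  */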
1472 if ((flags & opf_implicit) && unmodifiable_var_p (var))
1473 flags &= ~opf_def;
1475 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1476 virtual operands, unless the caller has specifically requested
1477 not to add virtual operands (used when adding operands inside an
1478 ADDR_EXPR expression). */
1479 if (flags & opf_no_vops)
1480 return;
1482 if (MTAG_P (var))
1483 aliases = MTAG_ALIASES (var);
1484 if (aliases == NULL)
1486 if (s_ann && !gimple_aliases_computed_p (cfun))
1487 s_ann->has_volatile_ops = true;
1488 /* The variable is not aliased or it is an alias tag. */
1489 if (flags & opf_def)
1490 append_vdef (var);
1491 else
1492 append_vuse (var);
1494 else
1496 bitmap_iterator bi;
1497 unsigned int i;
1498 tree al;
1500 /* The variable is aliased. Add its aliases to the virtual
1501 operands. */
1502 gcc_assert (!bitmap_empty_p (aliases));
1504 if (flags & opf_def)
1506 bool none_added = true;
1507 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
1509 al = referenced_var (i);
1510 if (!access_can_touch_variable (full_ref, al, offset, size))
1511 continue;
1513 none_added = false;
1514 append_vdef (al);
1517 /* If the variable is also an alias tag, add a virtual
1518 operand for it, otherwise we will miss representing
1519 references to the members of the variable's alias set.
1520 This fixes the bug in gcc.c-torture/execute/20020503-1.c.
1522 It is also necessary to add bare defs on clobbers for
1523 SMT's, so that bare SMT uses caused by pruning all the
1524 aliases will link up properly with calls. In order to
1525 keep the number of these bare defs we add down to the
1526 minimum necessary, we keep track of which SMT's were used
1527 alone in statement vdefs or VUSEs. */
1528 if (v_ann->is_aliased
1529 || none_added
1530 || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
1531 && for_clobber))
1533 append_vdef (var);
1536 else
1538 bool none_added = true;
1539 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
1541 al = referenced_var (i);
1542 if (!access_can_touch_variable (full_ref, al, offset, size))
1543 continue;
1544 none_added = false;
1545 append_vuse (al);
1548       /* Similarly, append a virtual use for VAR itself, when
1549 it is an alias tag. */
1550 if (v_ann->is_aliased || none_added)
1551 append_vuse (var);
1557 /* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
1558 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1559 the statement's real operands, otherwise it is added to virtual
1560 operands. */
1562 static void
1563 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1565 tree var, sym;
1566 var_ann_t v_ann;
1568 gcc_assert (SSA_VAR_P (*var_p) && s_ann);
1570 var = *var_p;
1571 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1572 v_ann = var_ann (sym);
1574 /* Mark statements with volatile operands. */
1575 if (TREE_THIS_VOLATILE (sym))
1576 s_ann->has_volatile_ops = true;
1578 if (is_gimple_reg (sym))
1580 /* The variable is a GIMPLE register. Add it to real operands. */
1581 if (flags & opf_def)
1582 append_def (var_p);
1583 else
1584 append_use (var_p);
1586 else
1587 add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
1591 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1592 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
1594 STMT is the statement being processed, EXPR is the INDIRECT_REF
1595 that got us here.
1597 FLAGS is as in get_expr_operands.
1599 FULL_REF contains the full pointer dereference expression, if we
1600 have it, or NULL otherwise.
1602 OFFSET and SIZE are the location of the access inside the
1603 dereferenced pointer, if known.
1605 RECURSE_ON_BASE should be set to true if we want to continue
1606 calling get_expr_operands on the base pointer, and false if
1607 something else will do it for us. */
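/* For instance (illustrative, assuming aliasing has already been
   computed): for a load 'x = *p_1' where 'p_1' carries a name memory tag,
   the tag (or its aliases) receives a VUSE; for a store '*p_1 = x' it
   receives the corresponding VDEFs; and when RECURSE_ON_BASE is true a
   real USE of the pointer 'p_1' itself is added as well.  */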
1609 static void
1610 get_indirect_ref_operands (tree stmt, tree expr, int flags,
1611 tree full_ref,
1612 HOST_WIDE_INT offset, HOST_WIDE_INT size,
1613 bool recurse_on_base)
1615 tree *pptr = &TREE_OPERAND (expr, 0);
1616 tree ptr = *pptr;
1617 stmt_ann_t s_ann = stmt_ann (stmt);
1619 s_ann->references_memory = true;
1620 if (s_ann && TREE_THIS_VOLATILE (expr))
1621 s_ann->has_volatile_ops = true;
1623 if (SSA_VAR_P (ptr))
1625 struct ptr_info_def *pi = NULL;
1627 /* If PTR has flow-sensitive points-to information, use it. */
1628 if (TREE_CODE (ptr) == SSA_NAME
1629 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1630 && pi->name_mem_tag)
1632 /* PTR has its own memory tag. Use it. */
1633 add_virtual_operand (pi->name_mem_tag, s_ann, flags,
1634 full_ref, offset, size, false);
1636 else
1638 /* If PTR is not an SSA_NAME or it doesn't have a name
1639 tag, use its symbol memory tag. */
1640 var_ann_t v_ann;
1642 /* If we are emitting debugging dumps, display a warning if
1643 PTR is an SSA_NAME with no flow-sensitive alias
1644 information. That means that we may need to compute
1645 aliasing again. */
1646 if (dump_file
1647 && TREE_CODE (ptr) == SSA_NAME
1648 && pi == NULL)
1650 fprintf (dump_file,
1651 "NOTE: no flow-sensitive alias info for ");
1652 print_generic_expr (dump_file, ptr, dump_flags);
1653 fprintf (dump_file, " in ");
1654 print_generic_stmt (dump_file, stmt, dump_flags);
1657 if (TREE_CODE (ptr) == SSA_NAME)
1658 ptr = SSA_NAME_VAR (ptr);
1659 v_ann = var_ann (ptr);
1661 if (v_ann->symbol_mem_tag)
1662 add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
1663 full_ref, offset, size, false);
1664 /* Aliasing information is missing; mark statement as volatile so we
1665 won't optimize it out too actively. */
1666 else if (s_ann && !gimple_aliases_computed_p (cfun)
1667 && (flags & opf_def))
1668 s_ann->has_volatile_ops = true;
1671 else if (TREE_CODE (ptr) == INTEGER_CST)
1673 /* If a constant is used as a pointer, we can't generate a real
1674 operand for it but we mark the statement volatile to prevent
1675 optimizations from messing things up. */
1676 if (s_ann)
1677 s_ann->has_volatile_ops = true;
1678 return;
1680 else
1682 /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
1683 gcc_unreachable ();
1686 /* If requested, add a USE operand for the base pointer. */
1687 if (recurse_on_base)
1688 get_expr_operands (stmt, pptr, opf_use);
1692 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1694 static void
1695 get_tmr_operands (tree stmt, tree expr, int flags)
1697 tree tag, ref;
1698 HOST_WIDE_INT offset, size, maxsize;
1699 subvar_t svars, sv;
1700 stmt_ann_t s_ann = stmt_ann (stmt);
1702 /* This statement references memory. */
1703 s_ann->references_memory = 1;
1705 /* First record the real operands. */
1706 get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
1707 get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);
1709 if (TMR_SYMBOL (expr))
1710 add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken);
1712 tag = TMR_TAG (expr);
1713 if (!tag)
1715 /* Something weird, so ensure that we will be careful. */
1716 s_ann->has_volatile_ops = true;
1717 return;
1720 if (DECL_P (tag))
1722 get_expr_operands (stmt, &tag, flags);
1723 return;
1726 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1727 gcc_assert (ref != NULL_TREE);
1728 svars = get_subvars_for_var (ref);
1729 for (sv = svars; sv; sv = sv->next)
1731 bool exact;
1733 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1734 add_stmt_operand (&sv->var, s_ann, flags);
1739 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1740 clobbered variables in the function. */
1742 static void
1743 add_call_clobber_ops (tree stmt, tree callee)
1745 unsigned u;
1746 bitmap_iterator bi;
1747 stmt_ann_t s_ann = stmt_ann (stmt);
1748 bitmap not_read_b, not_written_b;
1750 /* Functions that are not const, pure or never return may clobber
1751 call-clobbered variables. */
1752 if (s_ann)
1753 s_ann->makes_clobbering_call = true;
1755 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1756 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1757 if (gimple_global_var (cfun))
1759 tree var = gimple_global_var (cfun);
1760 add_stmt_operand (&var, s_ann, opf_def);
1761 return;
1764 /* Get info for local and module level statics. There is a bit
1765 set for each static if the call being processed does not read
1766 or write that variable. */
1767 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1768 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1770 /* Add a VDEF operand for every call clobbered variable. */
1771 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
1773 tree var = referenced_var_lookup (u);
1774 unsigned int escape_mask = var_ann (var)->escape_mask;
1775 tree real_var = var;
1776 bool not_read;
1777 bool not_written;
1779 /* Not read and not written are computed on regular vars, not
1780 subvars, so look at the parent var if this is an SFT. */
1781 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1782 real_var = SFT_PARENT_VAR (var);
1784 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1785 DECL_UID (real_var)) : false;
1786 not_written = not_written_b ? bitmap_bit_p (not_written_b,
1787 DECL_UID (real_var)) : false;
1788 gcc_assert (!unmodifiable_var_p (var));
1790 clobber_stats.clobbered_vars++;
1792 /* See if this variable is really clobbered by this function. */
1794 /* Trivial case: Things escaping only to pure/const are not
1795 clobbered by non-pure-const, and only read by pure/const. */
1796 if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
1798 tree call = get_call_expr_in (stmt);
1799 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1801 add_stmt_operand (&var, s_ann, opf_use);
1802 clobber_stats.unescapable_clobbers_avoided++;
1803 continue;
1805 else
1807 clobber_stats.unescapable_clobbers_avoided++;
1808 continue;
1812 if (not_written)
1814 clobber_stats.static_write_clobbers_avoided++;
1815 if (!not_read)
1816 add_stmt_operand (&var, s_ann, opf_use);
1817 else
1818 clobber_stats.static_read_clobbers_avoided++;
1820 else
1821 add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
1826 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1827 function. */
1829 static void
1830 add_call_read_ops (tree stmt, tree callee)
1832 unsigned u;
1833 bitmap_iterator bi;
1834 stmt_ann_t s_ann = stmt_ann (stmt);
1835 bitmap not_read_b;
1837   /* If the function is not pure, it may reference memory.  Add
1838 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1839 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1840 if (gimple_global_var (cfun))
1842 tree var = gimple_global_var (cfun);
1843 add_stmt_operand (&var, s_ann, opf_use);
1844 return;
1847 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1849 /* Add a VUSE for each call-clobbered variable. */
1850 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
1852 tree var = referenced_var (u);
1853 tree real_var = var;
1854 bool not_read;
1856 clobber_stats.readonly_clobbers++;
1858 /* Not read and not written are computed on regular vars, not
1859 subvars, so look at the parent var if this is an SFT. */
1861 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1862 real_var = SFT_PARENT_VAR (var);
1864 not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
1865 : false;
1867 if (not_read)
1869 clobber_stats.static_readonly_clobbers_avoided++;
1870 continue;
1873 add_stmt_operand (&var, s_ann, opf_use | opf_implicit);
1878 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1880 static void
1881 get_call_expr_operands (tree stmt, tree expr)
1883 tree op;
1884 int call_flags = call_expr_flags (expr);
1885 stmt_ann_t ann = stmt_ann (stmt);
1887 ann->references_memory = true;
1889 /* If aliases have been computed already, add VDEF or VUSE
1890 operands for all the symbols that have been found to be
1891 call-clobbered. */
1892 if (gimple_aliases_computed_p (cfun)
1893 && !(call_flags & ECF_NOVOPS))
1895 /* A 'pure' or a 'const' function never call-clobbers anything.
1896 A 'noreturn' function might, but since we don't return anyway
1897 there is no point in recording that. */
1898 if (TREE_SIDE_EFFECTS (expr)
1899 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1900 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1901 else if (!(call_flags & ECF_CONST))
1902 add_call_read_ops (stmt, get_callee_fndecl (expr));
1905 /* Find uses in the called function. */
1906 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
1908 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1909 get_expr_operands (stmt, &TREE_VALUE (op), opf_use);
1911 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
1915 /* Scan operands in the ASM_EXPR stmt referred to in INFO. */
1917 static void
1918 get_asm_expr_operands (tree stmt)
1920 stmt_ann_t s_ann;
1921 int i, noutputs;
1922 const char **oconstraints;
1923 const char *constraint;
1924 bool allows_mem, allows_reg, is_inout;
1925 tree link;
1927 s_ann = stmt_ann (stmt);
1928 noutputs = list_length (ASM_OUTPUTS (stmt));
1929 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
1931 /* Gather all output operands. */
1932 for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link))
1934 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1935 oconstraints[i] = constraint;
1936 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
1937 &allows_reg, &is_inout);
1939 /* This should have been split in gimplify_asm_expr. */
1940 gcc_assert (!allows_reg || !is_inout);
1942 /* Memory operands are addressable. Note that STMT needs the
1943 address of this operand. */
1944 if (!allows_reg && allows_mem)
1946 tree t = get_base_address (TREE_VALUE (link));
1947 if (t && DECL_P (t) && s_ann)
1948 add_to_addressable_set (t, &s_ann->addresses_taken);
1951 get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
1954 /* Gather all input operands. */
1955 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1957 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1958 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
1959 &allows_mem, &allows_reg);
1961 /* Memory operands are addressable. Note that STMT needs the
1962 address of this operand. */
1963 if (!allows_reg && allows_mem)
1965 tree t = get_base_address (TREE_VALUE (link));
1966 if (t && DECL_P (t) && s_ann)
1967 add_to_addressable_set (t, &s_ann->addresses_taken);
1970 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1973 /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
1974 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1975 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1977 unsigned i;
1978 bitmap_iterator bi;
1980 s_ann->references_memory = true;
1982 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
1984 tree var = referenced_var (i);
1985 add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
1988 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
1990 tree var = referenced_var (i);
1992 /* Subvars are explicitly represented in this list, so we
1993 don't need the original to be added to the clobber ops,
1994 but the original *will* be in this list because we keep
1995 the addressability of the original variable up-to-date
1996 to avoid confusing the back-end. */
1997 if (var_can_have_subvars (var)
1998 && get_subvars_for_var (var) != NULL)
1999 continue;
2001 add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
2003 break;
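/* Illustrative sketch only: a hypothetical predicate that detects the
   asm ("" : : : "memory") case handled by the clobber loop above,
   i.e. the case that forces VDEFs on every call-clobbered and
   addressable symbol.  */

static bool
asm_clobbers_memory_p (tree stmt)
{
  tree link;

  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      return true;

  return false;
}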
2008 /* Scan operands for the assignment expression EXPR in statement STMT. */
2010 static void
2011 get_modify_stmt_operands (tree stmt, tree expr)
2013 /* First get operands from the RHS. */
2014 get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_use);
2016 /* For the LHS, use a regular definition (opf_def) for GIMPLE
2017 registers. If the LHS is a store to memory, we will need
2018 a preserving definition (VDEF).
2020 Preserving definitions are those that modify a part of an
2021 aggregate object for which no subvars have been computed (or the
2022 reference does not correspond exactly to one of them). Stores
2023 through a pointer are also represented with VDEF operators.
2025 We used to distinguish between preserving and killing definitions.
2026 We always emit preserving definitions now. */
2027 get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0), opf_def);
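/* Illustrative sketch only (the helper name is hypothetical): once a
   store such as "*p_1 = x_2" has been scanned by the routine above, a
   consumer can read back the preserving definitions it created through
   the VDEF iterator.  */

static void
dump_stored_symbols (FILE *file, tree stmt)
{
  tree vdef;
  ssa_op_iter iter;

  /* Each iteration yields the result operand of one VDEF operator
     created for the memory written by STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VDEF)
    {
      print_generic_expr (file, vdef, TDF_SLIM);
      fprintf (file, "\n");
    }
}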
2031 /* Recursively scan the expression pointed to by EXPR_P in statement
2032 STMT. FLAGS is one of the OPF_* constants modifying how to
2033 interpret the operands found. */
2035 static void
2036 get_expr_operands (tree stmt, tree *expr_p, int flags)
2038 enum tree_code code;
2039 enum tree_code_class class;
2040 tree expr = *expr_p;
2041 stmt_ann_t s_ann = stmt_ann (stmt);
2043 if (expr == NULL)
2044 return;
2046 code = TREE_CODE (expr);
2047 class = TREE_CODE_CLASS (code);
2049 switch (code)
2051 case ADDR_EXPR:
2052 /* Taking the address of a variable does not represent a
2053 reference to it, but the fact that the statement takes its
2054 address will be of interest to some passes (e.g. alias
2055 resolution). */
2056 add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
2058 /* If the address is invariant, there may be no interesting
2059 variable references inside. */
2060 if (is_gimple_min_invariant (expr))
2061 return;
2063 /* Otherwise, there may be variables referenced inside but there
2064 should be no VUSEs created, since the referenced objects are
2065 not really accessed. The only operands that we should find
2066 here are ARRAY_REF indices which will always be real operands
2067 (GIMPLE does not allow non-registers as array indices). */
2068 flags |= opf_no_vops;
2069 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2070 return;
2072 case SSA_NAME:
2073 case STRUCT_FIELD_TAG:
2074 case SYMBOL_MEMORY_TAG:
2075 case NAME_MEMORY_TAG:
2076 add_stmt_operand (expr_p, s_ann, flags);
2077 return;
2079 case VAR_DECL:
2080 case PARM_DECL:
2081 case RESULT_DECL:
2083 subvar_t svars;
2085 /* Add the subvars for a variable, if it has subvars, to DEFS
2086 or USES. Otherwise, add the variable itself. Whether it
2087 goes to USES or DEFS depends on the operand flags. */
2088 if (var_can_have_subvars (expr)
2089 && (svars = get_subvars_for_var (expr)))
2091 subvar_t sv;
2092 for (sv = svars; sv; sv = sv->next)
2093 add_stmt_operand (&sv->var, s_ann, flags);
2095 else
2096 add_stmt_operand (expr_p, s_ann, flags);
2098 return;
2101 case MISALIGNED_INDIRECT_REF:
2102 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2103 /* fall through */
2105 case ALIGN_INDIRECT_REF:
2106 case INDIRECT_REF:
2107 get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
2108 return;
2110 case TARGET_MEM_REF:
2111 get_tmr_operands (stmt, expr, flags);
2112 return;
2114 case ARRAY_REF:
2115 case ARRAY_RANGE_REF:
2116 case COMPONENT_REF:
2117 case REALPART_EXPR:
2118 case IMAGPART_EXPR:
2120 tree ref;
2121 HOST_WIDE_INT offset, size, maxsize;
2122 bool none = true;
2124 /* This component reference becomes an access to all of the
2125 subvariables it can touch, if we can determine that, but
2126 *NOT* the real one. If we can't determine which fields we
2127 could touch, the recursion will eventually get to a
2128 variable and add *all* of its subvars, or whatever is the
2129 minimum correct subset. */
2130 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
2131 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
2133 subvar_t sv;
2134 subvar_t svars = get_subvars_for_var (ref);
2136 for (sv = svars; sv; sv = sv->next)
2138 bool exact;
2140 if (overlap_subvar (offset, maxsize, sv->var, &exact))
2142 int subvar_flags = flags;
2143 none = false;
2144 add_stmt_operand (&sv->var, s_ann, subvar_flags);
2148 if (!none)
2149 flags |= opf_no_vops;
2151 else if (TREE_CODE (ref) == INDIRECT_REF)
2153 get_indirect_ref_operands (stmt, ref, flags, expr, offset,
2154 maxsize, false);
2155 flags |= opf_no_vops;
2158 /* Even if we found subvars above, we still need to scan the base
2159 object so that we see immediate uses like the use of d in s.a[d];
2160 if s.a has subvars we would otherwise miss them. */
2161 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2163 if (code == COMPONENT_REF)
2165 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
2166 s_ann->has_volatile_ops = true;
2167 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2169 else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
2171 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2172 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2173 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use);
2176 return;
2179 case WITH_SIZE_EXPR:
2180 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
2181 and an rvalue reference to its second argument. */
2182 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2183 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2184 return;
2186 case CALL_EXPR:
2187 get_call_expr_operands (stmt, expr);
2188 return;
2190 case COND_EXPR:
2191 case VEC_COND_EXPR:
2192 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
2193 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2194 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2195 return;
2197 case GIMPLE_MODIFY_STMT:
2198 get_modify_stmt_operands (stmt, expr);
2199 return;
2201 case CONSTRUCTOR:
2203 /* General aggregate CONSTRUCTORs have been decomposed, but they
2204 are still in use as the COMPLEX_EXPR equivalent for vectors. */
2205 constructor_elt *ce;
2206 unsigned HOST_WIDE_INT idx;
2208 for (idx = 0;
2209 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
2210 idx++)
2211 get_expr_operands (stmt, &ce->value, opf_use);
2213 return;
2216 case BIT_FIELD_REF:
2217 case TRUTH_NOT_EXPR:
2218 case VIEW_CONVERT_EXPR:
2219 do_unary:
2220 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2221 return;
2223 case TRUTH_AND_EXPR:
2224 case TRUTH_OR_EXPR:
2225 case TRUTH_XOR_EXPR:
2226 case COMPOUND_EXPR:
2227 case OBJ_TYPE_REF:
2228 case ASSERT_EXPR:
2229 do_binary:
2231 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2232 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2233 return;
2236 case DOT_PROD_EXPR:
2237 case REALIGN_LOAD_EXPR:
2239 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2240 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2241 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
2242 return;
2245 case BLOCK:
2246 case FUNCTION_DECL:
2247 case EXC_PTR_EXPR:
2248 case FILTER_EXPR:
2249 case LABEL_DECL:
2250 case CONST_DECL:
2251 case OMP_PARALLEL:
2252 case OMP_SECTIONS:
2253 case OMP_FOR:
2254 case OMP_SINGLE:
2255 case OMP_MASTER:
2256 case OMP_ORDERED:
2257 case OMP_CRITICAL:
2258 case OMP_RETURN:
2259 case OMP_CONTINUE:
2260 /* Expressions that make no memory references. */
2261 return;
2263 default:
2264 if (class == tcc_unary)
2265 goto do_unary;
2266 if (class == tcc_binary || class == tcc_comparison)
2267 goto do_binary;
2268 if (class == tcc_constant || class == tcc_type)
2269 return;
2272 /* If we get here, something has gone wrong. */
2273 #ifdef ENABLE_CHECKING
2274 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
2275 debug_tree (expr);
2276 fputs ("\n", stderr);
2277 #endif
2278 gcc_unreachable ();
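/* Worked example (informal): for "p_1 = &a[i_2]" the walk above records
   a real definition of p_1 and a real use of i_2, notes in
   ADDRESSES_TAKEN that the address of 'a' is taken, and creates no
   VUSE for 'a' because the ADDR_EXPR path sets opf_no_vops (taking an
   address is not a memory reference).  */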
2282 /* Parse STMT looking for operands. When finished, the various
2283 build_* operand vectors will have potential operands in them. */
2285 static void
2286 parse_ssa_operands (tree stmt)
2288 enum tree_code code;
2290 code = TREE_CODE (stmt);
2291 switch (code)
2293 case GIMPLE_MODIFY_STMT:
2294 get_modify_stmt_operands (stmt, stmt);
2295 break;
2297 case COND_EXPR:
2298 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_use);
2299 break;
2301 case SWITCH_EXPR:
2302 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_use);
2303 break;
2305 case ASM_EXPR:
2306 get_asm_expr_operands (stmt);
2307 break;
2309 case RETURN_EXPR:
2310 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_use);
2311 break;
2313 case GOTO_EXPR:
2314 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_use);
2315 break;
2317 case LABEL_EXPR:
2318 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_use);
2319 break;
2321 case BIND_EXPR:
2322 case CASE_LABEL_EXPR:
2323 case TRY_CATCH_EXPR:
2324 case TRY_FINALLY_EXPR:
2325 case EH_FILTER_EXPR:
2326 case CATCH_EXPR:
2327 case RESX_EXPR:
2328 /* These nodes contain no variable references. */
2329 break;
2331 default:
2332 /* Notice that if get_expr_operands tries to use &STMT as the
2333 operand pointer (which may only happen for USE operands), we
2334 will fail in add_stmt_operand. This default will handle
2335 statements like empty statements, or CALL_EXPRs that may
2336 appear on the RHS of a statement or as statements themselves. */
2337 get_expr_operands (stmt, &stmt, opf_use);
2338 break;
2343 /* Create an operands cache for STMT. */
2345 static void
2346 build_ssa_operands (tree stmt)
2348 stmt_ann_t ann = get_stmt_ann (stmt);
2350 /* Initially assume that the statement has no volatile operands and
2351 makes no memory references. */
2352 ann->has_volatile_ops = false;
2353 ann->references_memory = false;
2354 /* Just clear the bitmap so we don't end up reallocating it over and over. */
2355 if (ann->addresses_taken)
2356 bitmap_clear (ann->addresses_taken);
2358 start_ssa_stmt_operands ();
2359 parse_ssa_operands (stmt);
2360 operand_build_sort_virtual (build_vuses);
2361 operand_build_sort_virtual (build_vdefs);
2362 finalize_ssa_stmt_operands (stmt);
2364 if (ann->addresses_taken && bitmap_empty_p (ann->addresses_taken))
2365 ann->addresses_taken = NULL;
2366 /* For added safety, assume that statements with volatile operands
2367 also reference memory. */
2368 if (ann->has_volatile_ops)
2369 ann->references_memory = true;
2373 /* Free any operands vectors in OPS. */
2375 void
2376 free_ssa_operands (stmt_operands_p ops)
2378 ops->def_ops = NULL;
2379 ops->use_ops = NULL;
2380 ops->vdef_ops = NULL;
2381 ops->vuse_ops = NULL;
2382 BITMAP_FREE (ops->loads);
2383 BITMAP_FREE (ops->stores);
2387 /* Get the operands of statement STMT. */
2389 void
2390 update_stmt_operands (tree stmt)
2392 stmt_ann_t ann = get_stmt_ann (stmt);
2394 /* If update_stmt_operands is called before SSA is initialized, do
2395 nothing. */
2396 if (!ssa_operands_active ())
2397 return;
2399 /* The optimizers cannot handle statements that are nothing but a
2400 _DECL. This indicates a bug in the gimplifier. */
2401 gcc_assert (!SSA_VAR_P (stmt));
2403 timevar_push (TV_TREE_OPS);
2405 gcc_assert (ann->modified);
2406 build_ssa_operands (stmt);
2407 ann->modified = 0;
2409 timevar_pop (TV_TREE_OPS);
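/* Illustrative sketch only (hypothetical helper): passes normally reach
   update_stmt_operands through the update_stmt wrapper after editing a
   statement in place, as sketched here.  */

static void
replace_rhs_and_update (tree stmt, tree val)
{
  /* Rewrite the RHS of the assignment, then rebuild the operand cache
     so that no stale operands are left behind.  */
  GIMPLE_STMT_OPERAND (stmt, 1) = val;
  update_stmt (stmt);
}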
2413 /* Copies virtual operands from SRC to DST. */
2415 void
2416 copy_virtual_operands (tree dest, tree src)
2418 unsigned int i, n;
2419 voptype_p src_vuses, dest_vuses;
2420 voptype_p src_vdefs, dest_vdefs;
2421 struct voptype_d vuse;
2422 struct voptype_d vdef;
2423 stmt_ann_t dest_ann;
2425 VDEF_OPS (dest) = NULL;
2426 VUSE_OPS (dest) = NULL;
2428 dest_ann = get_stmt_ann (dest);
2429 BITMAP_FREE (dest_ann->operands.loads);
2430 BITMAP_FREE (dest_ann->operands.stores);
2432 if (LOADED_SYMS (src))
2434 dest_ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);
2435 bitmap_copy (dest_ann->operands.loads, LOADED_SYMS (src));
2438 if (STORED_SYMS (src))
2440 dest_ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);
2441 bitmap_copy (dest_ann->operands.stores, STORED_SYMS (src));
2444 /* Copy all the VUSE operators and corresponding operands. */
2445 dest_vuses = &vuse;
2446 for (src_vuses = VUSE_OPS (src); src_vuses; src_vuses = src_vuses->next)
2448 n = VUSE_NUM (src_vuses);
2449 dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses);
2450 for (i = 0; i < n; i++)
2451 SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));
2453 if (VUSE_OPS (dest) == NULL)
2454 VUSE_OPS (dest) = vuse.next;
2457 /* Copy all the VDEF operators and corresponding operands. */
2458 dest_vdefs = &vdef;
2459 for (src_vdefs = VDEF_OPS (src); src_vdefs; src_vdefs = src_vdefs->next)
2461 n = VUSE_NUM (src_vdefs);
2462 dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs);
2463 VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs);
2464 for (i = 0; i < n; i++)
2465 SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));
2467 if (VDEF_OPS (dest) == NULL)
2468 VDEF_OPS (dest) = vdef.next;
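/* Illustrative sketch only (hypothetical helper): a typical use of
   copy_virtual_operands is giving a duplicated statement the same
   VUSEs and VDEFs as the original, when the copy reads and writes
   exactly the same memory.  unshare_expr is assumed to be available
   from the gimplifier.  */

static tree
duplicate_stmt_with_vops (tree stmt)
{
  tree copy = unshare_expr (stmt);

  /* copy_virtual_operands creates the annotation for the copy and
     clones the LOADED/STORED symbol bitmaps and virtual operators.  */
  copy_virtual_operands (copy, stmt);
  return copy;
}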
2473 /* Specifically for use in DOM's expression analysis. Given a store, we
2474 create an artificial stmt which looks like a load from the store; this can
2475 be used to eliminate redundant loads. OLD_STMT is the store statement
2476 whose operands are inspected, and NEW_STMT is the new load which
2477 represents a load of the values stored. */
2479 void
2480 create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt)
2482 tree op;
2483 ssa_op_iter iter;
2484 use_operand_p use_p;
2485 unsigned i;
2487 get_stmt_ann (new_stmt);
2489 /* Process NEW_STMT looking for operands. */
2490 start_ssa_stmt_operands ();
2491 parse_ssa_operands (new_stmt);
2493 for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
2494 if (TREE_CODE (op) != SSA_NAME)
2495 var_ann (op)->in_vuse_list = false;
2497 for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
2498 if (TREE_CODE (op) != SSA_NAME)
2499 var_ann (op)->in_vdef_list = false;
2501 /* Remove any virtual operands that were found. */
2502 VEC_truncate (tree, build_vdefs, 0);
2503 VEC_truncate (tree, build_vuses, 0);
2505 /* For each VDEF on the original statement, we want to create a
2506 VUSE of the VDEF result operand on the new statement. */
2507 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
2508 append_vuse (op);
2510 finalize_ssa_stmt_operands (new_stmt);
2512 /* All uses in this fake stmt must not be in the immediate use lists. */
2513 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
2514 delink_imm_use (use_p);
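/* Illustrative sketch only (hypothetical helper): this mirrors how DOM
   turns the store "LHS = RHS" into the fake load "RHS = LHS" whose
   VUSEs are the store's VDEF results, so that a later identical load
   can be recognized as redundant.  */

static tree
make_fake_load_for_store (tree store)
{
  tree lhs = GIMPLE_STMT_OPERAND (store, 0);
  tree rhs = GIMPLE_STMT_OPERAND (store, 1);
  tree load = build_gimple_modify_stmt (rhs, lhs);

  create_ssa_artificial_load_stmt (load, store);
  return load;
}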
2518 /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is made
2519 to test the validity of the swap operation. */
2521 void
2522 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
2524 tree op0, op1;
2525 op0 = *exp0;
2526 op1 = *exp1;
2528 /* If the operand cache is active, attempt to preserve the relative
2529 positions of these two operands in their respective immediate use
2530 lists. */
2531 if (ssa_operands_active () && op0 != op1)
2533 use_optype_p use0, use1, ptr;
2534 use0 = use1 = NULL;
2536 /* Find the 2 operands in the cache, if they are there. */
2537 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2538 if (USE_OP_PTR (ptr)->use == exp0)
2540 use0 = ptr;
2541 break;
2544 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2545 if (USE_OP_PTR (ptr)->use == exp1)
2547 use1 = ptr;
2548 break;
2551 /* Unless both uses have operand entries, there isn't much we can do
2552 at this point. Presumably we don't need to worry about it. */
2553 if (use0 && use1)
2555 tree *tmp = USE_OP_PTR (use1)->use;
2556 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
2557 USE_OP_PTR (use0)->use = tmp;
2561 /* Now swap the data. */
2562 *exp0 = op1;
2563 *exp1 = op0;
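/* Illustrative sketch only (hypothetical helper): canonicalize a
   commutative RHS by swapping its operands through swap_tree_operands,
   which keeps the operand cache and the immediate-use lists consistent
   with the modified tree.  */

static void
canonicalize_commutative_rhs (tree stmt)
{
  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);

  if (commutative_tree_code (TREE_CODE (rhs))
      && tree_swap_operands_p (TREE_OPERAND (rhs, 0),
                               TREE_OPERAND (rhs, 1), true))
    swap_tree_operands (stmt,
                        &TREE_OPERAND (rhs, 0),
                        &TREE_OPERAND (rhs, 1));
}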
2567 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
2568 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
2569 a single variable whose address has been taken or any other valid
2570 GIMPLE memory reference (structure reference, array, etc). If the
2571 base address of REF is a decl that has sub-variables, also add all
2572 of its sub-variables. */
2574 void
2575 add_to_addressable_set (tree ref, bitmap *addresses_taken)
2577 tree var;
2578 subvar_t svars;
2580 gcc_assert (addresses_taken);
2582 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
2583 as the only thing we take the address of. If VAR is a structure,
2584 taking the address of a field means that the whole structure may
2585 be referenced using pointer arithmetic. See PR 21407 and the
2586 ensuing mailing list discussion. */
2587 var = get_base_address (ref);
2588 if (var && SSA_VAR_P (var))
2590 if (*addresses_taken == NULL)
2591 *addresses_taken = BITMAP_GGC_ALLOC ();
2593 if (var_can_have_subvars (var)
2594 && (svars = get_subvars_for_var (var)))
2596 subvar_t sv;
2597 for (sv = svars; sv; sv = sv->next)
2599 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
2600 TREE_ADDRESSABLE (sv->var) = 1;
2603 else
2605 bitmap_set_bit (*addresses_taken, DECL_UID (var));
2606 TREE_ADDRESSABLE (var) = 1;
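/* Illustrative sketch only (hypothetical predicate): a consumer of the
   set maintained above, asking whether STMT takes the address of VAR
   (or of one of its sub-variables, since those are added as well).  */

static bool
stmt_takes_address_of_p (tree stmt, tree var)
{
  stmt_ann_t ann = stmt_ann (stmt);

  return ann
         && ann->addresses_taken
         && bitmap_bit_p (ann->addresses_taken, DECL_UID (var));
}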
2612 /* Scan the immediate_use list for VAR, making sure it is linked properly.
2613 Return TRUE if there is a problem and emit an error message to F. */
2615 bool
2616 verify_imm_links (FILE *f, tree var)
2618 use_operand_p ptr, prev, list;
2619 int count;
2621 gcc_assert (TREE_CODE (var) == SSA_NAME);
2623 list = &(SSA_NAME_IMM_USE_NODE (var));
2624 gcc_assert (list->use == NULL);
2626 if (list->prev == NULL)
2628 gcc_assert (list->next == NULL);
2629 return false;
2632 prev = list;
2633 count = 0;
2634 for (ptr = list->next; ptr != list; )
2636 if (prev != ptr->prev)
2637 goto error;
2639 if (ptr->use == NULL)
2640 goto error; /* 2 roots, or SAFE guard node. */
2641 else if (*(ptr->use) != var)
2642 goto error;
2644 prev = ptr;
2645 ptr = ptr->next;
2647 /* Avoid infinite loops. 50,000,000 uses probably indicates a
2648 problem. */
2649 if (count++ > 50000000)
2650 goto error;
2653 /* Verify list in the other direction. */
2654 prev = list;
2655 for (ptr = list->prev; ptr != list; )
2657 if (prev != ptr->next)
2658 goto error;
2659 prev = ptr;
2660 ptr = ptr->prev;
2661 if (count-- < 0)
2662 goto error;
2665 if (count != 0)
2666 goto error;
2668 return false;
2670 error:
2671 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2673 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2674 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2676 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2677 (void *)ptr->use);
2678 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2679 fprintf(f, "\n");
2680 return true;
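/* Illustrative sketch only (hypothetical checker): run verify_imm_links
   over the whole SSA name table, the same way the dump routines below
   walk it.  Returns true if any immediate-use list is corrupted.  */

static bool
verify_all_imm_links (FILE *f)
{
  unsigned int x;
  bool err = false;

  for (x = 1; x < num_ssa_names; x++)
    {
      tree name = ssa_name (x);

      if (name)
        err |= verify_imm_links (f, name);
    }

  return err;
}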
2684 /* Dump all the immediate uses of VAR to FILE. */
2686 void
2687 dump_immediate_uses_for (FILE *file, tree var)
2689 imm_use_iterator iter;
2690 use_operand_p use_p;
2692 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2694 print_generic_expr (file, var, TDF_SLIM);
2695 fprintf (file, " : -->");
2696 if (has_zero_uses (var))
2697 fprintf (file, " no uses.\n");
2698 else
2699 if (has_single_use (var))
2700 fprintf (file, " single use.\n");
2701 else
2702 fprintf (file, "%d uses.\n", num_imm_uses (var));
2704 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2706 if (use_p->stmt == NULL && use_p->use == NULL)
2707 fprintf (file, "***end of stmt iterator marker***\n");
2708 else
2709 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2710 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS|TDF_MEMSYMS);
2711 else
2712 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2714 fprintf(file, "\n");
2718 /* Dump all the immediate uses to FILE. */
2720 void
2721 dump_immediate_uses (FILE *file)
2723 tree var;
2724 unsigned int x;
2726 fprintf (file, "Immediate_uses: \n\n");
2727 for (x = 1; x < num_ssa_names; x++)
2729 var = ssa_name(x);
2730 if (!var)
2731 continue;
2732 dump_immediate_uses_for (file, var);
2737 /* Dump def-use edges on stderr. */
2739 void
2740 debug_immediate_uses (void)
2742 dump_immediate_uses (stderr);
2746 /* Dump def-use edges of VAR on stderr. */
2748 void
2749 debug_immediate_uses_for (tree var)
2751 dump_immediate_uses_for (stderr, var);
2755 /* Create a new change buffer for the statement pointed to by STMT_P and
2756 push the buffer into SCB_STACK. Each change buffer
2757 records state information needed to determine what changed in the
2758 statement. Mainly, this keeps track of symbols that may need to be
2759 put into SSA form, SSA name replacements and other information
2760 needed to keep the SSA form up to date. */
2762 void
2763 push_stmt_changes (tree *stmt_p)
2765 tree stmt;
2766 scb_t buf;
2768 stmt = *stmt_p;
2770 /* It makes no sense to keep track of PHI nodes. */
2771 if (TREE_CODE (stmt) == PHI_NODE)
2772 return;
2774 buf = xmalloc (sizeof *buf);
2775 memset (buf, 0, sizeof *buf);
2777 buf->stmt_p = stmt_p;
2779 if (stmt_references_memory_p (stmt))
2781 tree op;
2782 ssa_op_iter i;
2784 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
2786 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2787 if (buf->loads == NULL)
2788 buf->loads = BITMAP_ALLOC (NULL);
2789 bitmap_set_bit (buf->loads, DECL_UID (sym));
2792 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
2794 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2795 if (buf->stores == NULL)
2796 buf->stores = BITMAP_ALLOC (NULL);
2797 bitmap_set_bit (buf->stores, DECL_UID (sym));
2801 VEC_safe_push (scb_t, heap, scb_stack, buf);
2805 /* Given two sets S1 and S2, mark the symbols that differ in S1 and S2
2806 for renaming. The set to mark for renaming is (S1 & ~S2) | (S2 & ~S1). */
2808 static void
2809 mark_difference_for_renaming (bitmap s1, bitmap s2)
2811 if (s1 == NULL && s2 == NULL)
2812 return;
2814 if (s1 && s2 == NULL)
2815 mark_set_for_renaming (s1);
2816 else if (s1 == NULL && s2)
2817 mark_set_for_renaming (s2);
2818 else if (!bitmap_equal_p (s1, s2))
2820 bitmap t1 = BITMAP_ALLOC (NULL);
2821 bitmap t2 = BITMAP_ALLOC (NULL);
2823 bitmap_and_compl (t1, s1, s2);
2824 bitmap_and_compl (t2, s2, s1);
2825 bitmap_ior_into (t1, t2);
2826 mark_set_for_renaming (t1);
2828 BITMAP_FREE (t1);
2829 BITMAP_FREE (t2);
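/* Worked example (informal): if a statement's loaded symbols change
   from S1 = {a, b} to S2 = {b, c}, the symmetric difference
   (S1 & ~S2) | (S2 & ~S1) = {a, c} is marked for renaming; 'b' is left
   alone because its SSA web is unaffected.  */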
2834 /* Pop the top SCB from SCB_STACK and act on the differences between
2835 what was recorded by push_stmt_changes and the current state of
2836 the statement. */
2838 void
2839 pop_stmt_changes (tree *stmt_p)
2841 tree op, stmt;
2842 ssa_op_iter iter;
2843 bitmap loads, stores;
2844 scb_t buf;
2846 stmt = *stmt_p;
2848 /* It makes no sense to keep track of PHI nodes. */
2849 if (TREE_CODE (stmt) == PHI_NODE)
2850 return;
2852 buf = VEC_pop (scb_t, scb_stack);
2853 gcc_assert (stmt_p == buf->stmt_p);
2855 /* Force an operand re-scan on the statement and mark any newly
2856 exposed variables. */
2857 update_stmt (stmt);
2859 /* Determine whether any memory symbols need to be renamed. If the
2860 sets of loads and stores are different after the statement is
2861 modified, then the affected symbols need to be renamed.
2863 Note that it may be possible for the statement to not reference
2864 memory anymore, but we still need to act on the differences in
2865 the sets of symbols. */
2866 loads = stores = NULL;
2867 if (stmt_references_memory_p (stmt))
2869 tree op;
2870 ssa_op_iter i;
2872 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
2874 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2875 if (loads == NULL)
2876 loads = BITMAP_ALLOC (NULL);
2877 bitmap_set_bit (loads, DECL_UID (sym));
2880 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
2882 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2883 if (stores == NULL)
2884 stores = BITMAP_ALLOC (NULL);
2885 bitmap_set_bit (stores, DECL_UID (sym));
2889 /* If LOADS is different from BUF->LOADS, the affected
2890 symbols need to be marked for renaming. */
2891 mark_difference_for_renaming (loads, buf->loads);
2893 /* Similarly for STORES and BUF->STORES. */
2894 mark_difference_for_renaming (stores, buf->stores);
2896 /* Mark all the naked GIMPLE register operands for renaming. */
2897 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
2898 if (DECL_P (op))
2899 mark_sym_for_renaming (op);
2901 /* FIXME, need to add more finalizers here. Cleanup EH info,
2902 recompute invariants for address expressions, add
2903 SSA replacement mappings, etc. For instance, given
2904 testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of
2905 the form:
2907 # SMT.4_20 = VDEF <SMT.4_16>
2908 D.1576_11 = 1.0e+0;
2910 So, the VDEF will disappear, but instead of marking SMT.4 for
2911 renaming it would be far more efficient to establish a
2912 replacement mapping that would replace every reference of
2913 SMT.4_20 with SMT.4_16. */
2915 /* Free memory used by the buffer. */
2916 BITMAP_FREE (buf->loads);
2917 BITMAP_FREE (buf->stores);
2918 BITMAP_FREE (loads);
2919 BITMAP_FREE (stores);
2920 buf->stmt_p = NULL;
2921 free (buf);
2925 /* Discard the topmost change buffer from SCB_STACK. This is useful
2926 when the caller realizes that it did not actually modify the
2927 statement. It avoids the expensive operand re-scan. */
2929 void
2930 discard_stmt_changes (tree *stmt_p)
2932 scb_t buf;
2933 tree stmt;
2935 /* It makes no sense to keep track of PHI nodes. */
2936 stmt = *stmt_p;
2937 if (TREE_CODE (stmt) == PHI_NODE)
2938 return;
2940 buf = VEC_pop (scb_t, scb_stack);
2941 gcc_assert (stmt_p == buf->stmt_p);
2943 /* Free memory used by the buffer. */
2944 BITMAP_FREE (buf->loads);
2945 BITMAP_FREE (buf->stores);
2946 buf->stmt_p = NULL;
2947 free (buf);
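/* Illustrative sketch only (hypothetical helper): the intended protocol
   around the change buffers above.  Push before touching the statement;
   pop if it changed, so operands are re-scanned and affected symbols
   marked for renaming; discard if it was left untouched, avoiding the
   re-scan.  fold_stmt is assumed to return true only when it changed
   the statement.  */

static void
try_fold_stmt (tree *stmt_p)
{
  push_stmt_changes (stmt_p);

  if (fold_stmt (stmt_p))
    pop_stmt_changes (stmt_p);
  else
    discard_stmt_changes (stmt_p);
}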
2951 /* Returns true if statement STMT may access memory. */
2953 bool
2954 stmt_references_memory_p (tree stmt)
2956 if (!gimple_ssa_operands (cfun)->ops_active || TREE_CODE (stmt) == PHI_NODE)
2957 return false;
2959 return stmt_ann (stmt)->references_memory;
2963 /* Return the memory partition tag (MPT) associated with memory
2964 symbol SYM. From a correctness standpoint, memory partitions can
2965 be assigned in any arbitrary fashion as long as this rule is
2966 observed: Given two memory partitions MPT.i and MPT.j, they must
2967 not contain symbols in common.
2969 Memory partitions are used when putting the program into Memory-SSA
2970 form. In particular, in Memory-SSA PHI nodes are not computed for
2971 individual memory symbols. They are computed for memory
2972 partitions. This reduces the amount of PHI nodes in the SSA graph
2973 at the expense of precision (i.e., it makes unrelated stores affect
2974 each other).
2976 However, it is possible to increase precision by changing this
2977 partitioning scheme. For instance, if the partitioning scheme is
2978 such that get_mpt_for is the identity function (that is,
2979 get_mpt_for (s) = s), this will result in ultimate precision at the
2980 expense of huge SSA webs.
2982 At the other extreme, a partitioning scheme that groups all the
2983 symbols in the same set results in minimal SSA webs and almost
2984 total loss of precision. */
2986 tree
2987 get_mpt_for (tree sym)
2989 tree mpt;
2991 /* Don't create a new tag unnecessarily. */
2992 mpt = memory_partition (sym);
2993 if (mpt == NULL_TREE)
2995 mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
2996 TREE_ADDRESSABLE (mpt) = 0;
2997 MTAG_GLOBAL (mpt) = 1;
2998 add_referenced_var (mpt);
2999 VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
3000 MPT_SYMBOLS (mpt) = BITMAP_ALLOC (&operands_bitmap_obstack);
3001 set_memory_partition (sym, mpt);
3004 return mpt;
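/* Illustrative sketch only (hypothetical predicate): check whether SYM
   currently belongs to partition MPT, using the mapping maintained by
   get_mpt_for together with the partition's symbol set.  */

static bool
sym_in_partition_p (tree sym, tree mpt)
{
  return memory_partition (sym) == mpt
         && bitmap_bit_p (MPT_SYMBOLS (mpt), DECL_UID (sym));
}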
3008 /* Dump memory partition information to FILE. */
3010 void
3011 dump_memory_partitions (FILE *file)
3013 unsigned i, npart;
3014 unsigned long nsyms;
3015 tree mpt;
3017 fprintf (file, "\nMemory partitions\n\n");
3018 for (i = 0, npart = 0, nsyms = 0;
3019 VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
3020 i++)
3022 if (mpt)
3024 bitmap syms = MPT_SYMBOLS (mpt);
3025 unsigned long n = bitmap_count_bits (syms);
3027 fprintf (file, "#%u: ", i);
3028 print_generic_expr (file, mpt, 0);
3029 fprintf (file, ": %lu elements: ", n);
3030 dump_decl_set (file, syms);
3031 npart++;
3032 nsyms += n;
3036 fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
3040 /* Dump memory partition information to stderr. */
3042 void
3043 debug_memory_partitions (void)
3045 dump_memory_partitions (stderr);