/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
#include "coretypes.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "ipa-reference.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   For instance, if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified such
   that it contains 'a_5' rather than 'a'.  */
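/* As a concrete sketch (not taken from any particular testcase), consider
   the statement "*p = a + b" where 'p' may point to 'x' or 'y'.  Operand
   scanning would record roughly:

       uses:   a, b      (real operands, pointers into the stmt tree)
       vdefs:  x, y      (virtual operands, present only in the cache)

   and the finalize routines below would then commit these build vectors
   into the stmt's operand cache, reusing old vectors where possible.  */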
/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use		0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def 	(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of GIMPLE_MODIFY_STMT from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit	(1 << 2)
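/* A sketch of how these flags combine in the calls that appear later in
   this file: the RHS of an assignment is scanned with opf_use, the LHS
   with opf_def, and traversal into an ADDR_EXPR ORs in opf_no_vops:

     get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_use);
     get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0), opf_def);
     flags |= opf_no_vops;  */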
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Set for building all the VDEF operands.  */
static VEC(tree,heap) *build_vdefs;

/* Set for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;

/* Bitmap obstack for our datastructures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

/* Set for building all the loaded symbols.  */
static bitmap build_loads;

/* Set for building all the stored symbols.  */
static bitmap build_stores;

static void get_expr_operands (tree, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;
/* Statement change buffer.  Data structure used to record state
   information for statements.  This is used to determine what needs
   to be done in order to update the SSA web after a statement is
   modified by a pass.  If STMT is a statement that has just been
   created, or needs to be folded via fold_stmt, or anything that
   changes its physical structure, then the pass should:

   1- Call push_stmt_changes (&stmt) to record the current state of
      STMT before any modifications are made.

   2- Make all appropriate modifications to the statement.

   3- Call pop_stmt_changes (&stmt) to find new symbols that
      need to be put in SSA form, SSA name mappings for names that
      have disappeared, recompute invariantness for address
      expressions, cleanup EH information, etc.

   If it is possible to determine that the statement was not modified,
   instead of calling pop_stmt_changes it is quicker to call
   discard_stmt_changes to avoid the expensive and unnecessary operand
   re-scan and change comparison.  */

struct scb_d
{
  /* Pointer to the statement being modified.  */
  tree *stmt_p;

  /* If the statement references memory these are the sets of symbols
     loaded and stored by the statement.  */
  bitmap loads;
  bitmap stores;
};

typedef struct scb_d *scb_t;
DEF_VEC_P(scb_t);
DEF_VEC_ALLOC_P(scb_t,heap);

/* Stack of statement change buffers (SCB).  Every call to
   push_stmt_changes pushes a new buffer onto the stack.  Calls to
   pop_stmt_changes pop a buffer off of the stack and compute the set
   of changes for the popped statement.  */
static VEC(scb_t,heap) *scb_stack;
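/* A sketched example of the protocol described above, as a pass might
   use it ('stmt_really_changed' is a placeholder for whatever test the
   pass uses, not a function defined here):

     push_stmt_changes (&stmt);
     ...modify the statement...
     if (stmt_really_changed)
       pop_stmt_changes (&stmt);
     else
       discard_stmt_changes (&stmt);  */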
/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}
/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *) p);
  tree e2 = *((const tree *) q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}
/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);

  if (num < 2)
    return;

  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
	  > get_name_decl (VEC_index (tree, list, 1)))
	{
	  /* Swap elements if in the wrong order.  */
	  tree tmp = VEC_index (tree, list, 0);
	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
	  VEC_replace (tree, list, 1, tmp);
	}
      return;
    }

  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
	 VEC_length (tree, list),
	 sizeof (tree),
	 operand_build_cmp);
}
/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}
/* VOPs are of variable size, so the free list maps "free buckets" to a
   table of bucket sizes: buckets 0-15 hold 1 to 16 operands, and each
   bucket after that holds the next 8-operand chunk.  Any VOPs larger
   than this are simply added to the largest bucket when they are
   freed.  */


/* Return the number of operands used in bucket BUCKET.  */

static inline int
vop_free_bucket_size (int bucket)
{
#ifdef ENABLE_CHECKING
  gcc_assert (bucket >= 0 && bucket < NUM_VOP_FREE_BUCKETS);
#endif
  if (bucket < 16)
    return bucket + 1;
  return (bucket - 13) * 8;
}
/* For a vop of NUM operands, return the bucket NUM belongs to.  If NUM is
   beyond the end of the bucket table, return -1.  */

static inline int
vop_free_bucket_index (int num)
{
  gcc_assert (num > 0 && NUM_VOP_FREE_BUCKETS > 16);

  /* Sizes 1 through 16 use buckets 0-15.  */
  if (num <= 16)
    return num - 1;
  /* Buckets 16 - NUM_VOP_FREE_BUCKETS represent 8 unit chunks.  */
  num = 14 + (num - 1) / 8;
  if (num >= NUM_VOP_FREE_BUCKETS)
    return -1;
  else
    return num;
}
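/* Worked examples of the two formulas above: NUM = 16 maps to bucket 15;
   NUM = 17 maps to 14 + 16/8 = bucket 16, whose size per
   vop_free_bucket_size is (16 - 13) * 8 = 24 operands; NUM = 25 maps to
   bucket 17 (32 operands).  Requests past the table return -1 and the
   caller falls back to the largest bucket.  */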
/* Initialize the VOP free buckets.  */

static inline void
init_vop_buckets (void)
{
  int x;

  for (x = 0; x < NUM_VOP_FREE_BUCKETS; x++)
    gimple_ssa_operands (cfun)->vop_free_buckets[x] = NULL;
}
/* Add PTR to the appropriate VOP bucket.  */

static inline void
add_vop_to_freelist (voptype_p ptr)
{
  int bucket = vop_free_bucket_index (VUSE_VECT_NUM_ELEM (ptr->usev));

  /* Too large, use the largest bucket so it's not a complete throw away.  */
  if (bucket == -1)
    bucket = NUM_VOP_FREE_BUCKETS - 1;

  ptr->next = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
  gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = ptr;
}
/* These are the sizes of the operand memory buffer which gets allocated each
   time more operands space is required.  The final value is the amount that is
   allocated every time after that.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_1	30
#define OP_SIZE_2	110
#define OP_SIZE_3	511
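/* Thus the operand memory grows on a fixed schedule (see
   ssa_operand_alloc below): the first buffer allocated is
   OP_SIZE_1 * sizeof (struct voptype_d) bytes, the second
   OP_SIZE_2 * sizeof (struct voptype_d), and every buffer after that
   OP_SIZE_3 * sizeof (struct voptype_d).  */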
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuses = VEC_alloc (tree, heap, 25);
      build_vdefs = VEC_alloc (tree, heap, 25);
      bitmap_obstack_initialize (&operands_bitmap_obstack);
      build_loads = BITMAP_ALLOC (&operands_bitmap_obstack);
      build_stores = BITMAP_ALLOC (&operands_bitmap_obstack);
      scb_stack = VEC_alloc (scb_t, heap, 20);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index
     = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  init_vop_buckets ();
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
}
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  unsigned ix;
  tree mpt;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      VEC_free (tree, heap, build_vdefs);
      VEC_free (tree, heap, build_vuses);
      BITMAP_FREE (build_loads);
      BITMAP_FREE (build_stores);

      /* The change buffer stack had better be empty.  */
      gcc_assert (VEC_length (scb_t, scb_stack) == 0);
      VEC_free (scb_t, heap, scb_stack);
      scb_stack = NULL;
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
	= gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  for (ix = 0;
       VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt);
       ix++)
    {
      if (mpt)
	BITMAP_FREE (MPT_SYMBOLS (mpt));
    }

  VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table);

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);
  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars: %d\n",
	       clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided: %d\n",
	       clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided: %d\n",
	       clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
	       clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers: %d\n",
	       clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
	       clobber_stats.static_readonly_clobbers_avoided);
    }
}
/* Return memory for operands of SIZE chunks.  */

static void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      if (gimple_ssa_operands (cfun)->ssa_operand_mem_size == OP_SIZE_INIT)
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_1 * sizeof (struct voptype_d);
      else if (gimple_ssa_operands (cfun)->ssa_operand_mem_size
	       == OP_SIZE_1 * sizeof (struct voptype_d))
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_2 * sizeof (struct voptype_d);
      else
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_3 * sizeof (struct voptype_d);

      /* Go right to the maximum size if the request is too large.  */
      if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size)
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_3 * sizeof (struct voptype_d);

      /* Fail if there is not enough space.  If there are this many operands
	 required, first make sure there isn't a different problem causing this
	 many operands.  If the decision is that this is OK, then we can
	 specially allocate a buffer just for this request.  */
      gcc_assert (size <= gimple_ssa_operands (cfun)->ssa_operand_mem_size);

      ptr = (struct ssa_operand_memory_d *)
	ggc_alloc (sizeof (struct ssa_operand_memory_d)
		   + gimple_ssa_operands (cfun)->ssa_operand_mem_size - 1);
      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
	  ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}
/* Allocate a DEF operand.  */

static inline struct def_optype_d *
alloc_def (void)
{
  struct def_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_defs)
    {
      ret = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs
	= gimple_ssa_operands (cfun)->free_defs->next;
    }
  else
    ret = (struct def_optype_d *)
      ssa_operand_alloc (sizeof (struct def_optype_d));
  return ret;
}
/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
	= gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
      ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}
/* Allocate a vop with NUM elements.  */

static inline struct voptype_d *
alloc_vop (int num)
{
  struct voptype_d *ret = NULL;
  int alloc_size = 0;

  int bucket = vop_free_bucket_index (num);
  if (bucket != -1)
    {
      /* If there is a free operand, use it.  */
      if (gimple_ssa_operands (cfun)->vop_free_buckets[bucket] != NULL)
	{
	  ret = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
	  gimple_ssa_operands (cfun)->vop_free_buckets[bucket] =
	    gimple_ssa_operands (cfun)->vop_free_buckets[bucket]->next;
	}
      else
	alloc_size = vop_free_bucket_size (bucket);
    }
  else
    alloc_size = num;

  if (alloc_size > 0)
    ret = (struct voptype_d *) ssa_operand_alloc (
	sizeof (struct voptype_d) + (alloc_size - 1) * sizeof (vuse_element_t));

  VUSE_VECT_NUM_ELEM (ret->usev) = num;
  return ret;
}
/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* fold_stmt may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}
/* Adds OP to the list of defs after LAST.  */

static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
  def_optype_p new;

  new = alloc_def ();
  DEF_OP_PTR (new) = op;
  last->next = new;
  new->next = NULL;
  return new;
}
/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (tree stmt, tree *op, use_optype_p last)
{
  use_optype_p new;

  new = alloc_use ();
  USE_OP_PTR (new)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new), *op, stmt);
  last->next = new;
  new->next = NULL;
  return new;
}
/* Return a virtual op pointer with NUM elements which are all initialized to OP
   and are linked into the immediate uses for STMT.  The new vop is appended
   after PREV.  */

static inline voptype_p
add_vop (tree stmt, tree op, int num, voptype_p prev)
{
  voptype_p new;
  int x;

  new = alloc_vop (num);
  for (x = 0; x < num; x++)
    {
      VUSE_OP_PTR (new, x)->prev = NULL;
      SET_VUSE_OP (new, x, op);
      VUSE_OP_PTR (new, x)->use = &new->usev.uses[x].use_var;
      link_imm_use_stmt (VUSE_OP_PTR (new, x), new->usev.uses[x].use_var, stmt);
    }

  if (prev)
    prev->next = new;
  new->next = NULL;
  return new;
}
/* Adds OP to the list of vuses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline voptype_p
add_vuse_op (tree stmt, tree op, int num, voptype_p last)
{
  voptype_p new = add_vop (stmt, op, num, last);
  VDEF_RESULT (new) = NULL_TREE;
  return new;
}
/* Adds OP to the list of vdefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline voptype_p
add_vdef_op (tree stmt, tree op, int num, voptype_p last)
{
  voptype_p new = add_vop (stmt, op, num, last);
  VDEF_RESULT (new) = op;
  return new;
}
/* Reallocate the virtual operand PTR so that it has NUM_ELEM use slots.  ROOT
   is the head of the operand list it belongs to.  */

static inline struct voptype_d *
realloc_vop (struct voptype_d *ptr, unsigned int num_elem,
	     struct voptype_d **root)
{
  unsigned int x, lim;
  tree stmt, val;
  struct voptype_d *ret, *tmp;

  if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
    return ptr;

  val = VUSE_OP (ptr, 0);
  if (TREE_CODE (val) == SSA_NAME)
    val = SSA_NAME_VAR (val);

  stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));

  /* Delink all the existing uses.  */
  for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
    {
      use_operand_p use_p = VUSE_OP_PTR (ptr, x);
      delink_imm_use (use_p);
    }

  /* If we want less space, simply use this one, and shrink the size.  */
  if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem)
    {
      VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem;
      return ptr;
    }

  /* It is growing.  Allocate a new one and replace the old one.  */
  ret = add_vuse_op (stmt, val, num_elem, ptr);

  /* Clear PTR and add its memory to the free list.  */
  lim = VUSE_VECT_NUM_ELEM (ptr->usev);
  memset (ptr, 0,
	  sizeof (struct voptype_d) + sizeof (vuse_element_t) * (lim - 1));
  add_vop_to_freelist (ptr);

  /* Now simply remove the old one.  */
  if (*root == ptr)
    {
      *root = ret;
      return ret;
    }
  else
    for (tmp = *root;
	 tmp != NULL && tmp->next != ptr;
	 tmp = tmp->next)
      ;

  /* The pointer passed in isn't in STMT's VDEF lists.  */
  gcc_assert (tmp != NULL);

  tmp->next = ret;
  return ret;
}
/* Reallocate the PTR vdef so that it has NUM_ELEM use slots.  */

struct voptype_d *
realloc_vdef (struct voptype_d *ptr, unsigned int num_elem)
{
  tree val, stmt;
  struct voptype_d *ret;

  val = VDEF_RESULT (ptr);
  stmt = USE_STMT (VDEF_OP_PTR (ptr, 0));
  ret = realloc_vop (ptr, num_elem, &(VDEF_OPS (stmt)));
  VDEF_RESULT (ret) = val;
  return ret;
}
/* Reallocate the PTR vuse so that it has NUM_ELEM use slots.  */

struct voptype_d *
realloc_vuse (struct voptype_d *ptr, unsigned int num_elem)
{
  tree stmt;
  struct voptype_d *ret;

  stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
  ret = realloc_vop (ptr, num_elem, &(VUSE_OPS (stmt)));
  return ret;
}
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (tree stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);

  new_list.next = NULL;
  last = &new_list;

  old_ops = DEF_OPS (stmt);

  new_i = 0;

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  DEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    def_optype_p ptr;
    unsigned x = 0;
    for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == num);
  }
#endif
}
/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (tree stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to update_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *) VEC_index (tree, build_uses, x)) != stmt);
  }
#endif

  new_list.next = NULL;
  last = &new_list;

  old_ops = USE_OPS (stmt);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
		       (tree *) VEC_index (tree, build_uses, new_i),
		       last);

  /* Now set the stmt's operands.  */
  USE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_uses));
  }
#endif
}
/* Takes elements from BUILD_VDEFS and turns them into vdef operands of
   STMT.  FIXME, for now VDEF operators should have a single operand
   in their RHS.  */

static inline void
finalize_ssa_vdefs (tree stmt)
{
  unsigned new_i;
  struct voptype_d new_list;
  voptype_p old_ops, ptr, last;
  stmt_ann_t ann = stmt_ann (stmt);

  /* Set the symbols referenced by STMT.  */
  if (!bitmap_empty_p (build_stores))
    {
      if (ann->operands.stores == NULL)
	ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);

      bitmap_copy (ann->operands.stores, build_stores);
    }
  else
    BITMAP_FREE (ann->operands.stores);

  /* If aliases have not been computed, do not instantiate a virtual
     operator on STMT.  Initially, we only compute the SSA form on
     GIMPLE registers.  The virtual SSA form is only computed after
     alias analysis, so virtual operators will remain unrenamed and
     the verifier will complain.  However, alias analysis needs to
     access symbol load/store information, so we need to compute
     those.  */
  if (!gimple_aliases_computed_p (cfun))
    return;

  new_list.next = NULL;
  last = &new_list;

  old_ops = VDEF_OPS (stmt);
  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_vdefs))
    {
      tree op = VEC_index (tree, build_vdefs, new_i);
      unsigned new_uid = get_name_decl (op);
      unsigned old_uid = get_name_decl (VDEF_RESULT (old_ops));

      /* FIXME, for now each VDEF operator should have at most one
	 operand in their RHS.  */
      gcc_assert (VDEF_NUM (old_ops) == 1);

      if (old_uid == new_uid)
	{
	  /* If the symbols are the same, reuse the existing operand.  */
	  last->next = old_ops;
	  last = old_ops;
	  old_ops = old_ops->next;
	  last->next = NULL;
	  set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt);
	  new_i++;
	}
      else if (old_uid < new_uid)
	{
	  /* If old is less than new, old goes to the free list.  */
	  voptype_p next;
	  delink_imm_use (VDEF_OP_PTR (old_ops, 0));
	  next = old_ops->next;
	  add_vop_to_freelist (old_ops);
	  old_ops = next;
	}
      else
	{
	  /* This is a new operand.  */
	  last = add_vdef_op (stmt, op, 1, last);
	  new_i++;
	}
    }

  /* If there is anything remaining in BUILD_VDEFS, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
    last = add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, last);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = last)
	{
	  last = ptr->next;
	  delink_imm_use (VDEF_OP_PTR (ptr, 0));
	  add_vop_to_freelist (ptr);
	}
    }

  /* Now set STMT's operands.  */
  VDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = VDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_vdefs));
  }
#endif
}
/* Takes elements from BUILD_VUSES and turns them into VUSE operands of
   STMT.  */

static inline void
finalize_ssa_vuse_ops (tree stmt)
{
  unsigned new_i, old_i;
  voptype_p old_ops, last;
  VEC(tree,heap) *new_ops;
  stmt_ann_t ann;

  /* Set the symbols referenced by STMT.  */
  ann = stmt_ann (stmt);
  if (!bitmap_empty_p (build_loads))
    {
      if (ann->operands.loads == NULL)
	ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);

      bitmap_copy (ann->operands.loads, build_loads);
    }
  else
    BITMAP_FREE (ann->operands.loads);

  /* If aliases have not been computed, do not instantiate a virtual
     operator on STMT.  Initially, we only compute the SSA form on
     GIMPLE registers.  The virtual SSA form is only computed after
     alias analysis, so virtual operators will remain unrenamed and
     the verifier will complain.  However, alias analysis needs to
     access symbol load/store information, so we need to compute
     those.  */
  if (!gimple_aliases_computed_p (cfun))
    return;

  /* STMT should have at most one VUSE operator.  */
  old_ops = VUSE_OPS (stmt);
  gcc_assert (old_ops == NULL || old_ops->next == NULL);

  new_ops = NULL;
  new_i = old_i = 0;
  while (old_ops
	 && old_i < VUSE_NUM (old_ops)
	 && new_i < VEC_length (tree, build_vuses))
    {
      tree new_op = VEC_index (tree, build_vuses, new_i);
      tree old_op = VUSE_OP (old_ops, old_i);
      unsigned new_uid = get_name_decl (new_op);
      unsigned old_uid = get_name_decl (old_op);

      if (old_uid == new_uid)
	{
	  /* If the symbols are the same, reuse the existing operand.  */
	  VEC_safe_push (tree, heap, new_ops, old_op);
	  new_i++;
	  old_i++;
	}
      else if (old_uid < new_uid)
	{
	  /* If OLD_UID is less than NEW_UID, the old operand has
	     disappeared, skip to the next old operand.  */
	  old_i++;
	}
      else
	{
	  /* This is a new operand.  */
	  VEC_safe_push (tree, heap, new_ops, new_op);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_vuses list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
    VEC_safe_push (tree, heap, new_ops, VEC_index (tree, build_vuses, new_i));

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
	delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
      add_vop_to_freelist (old_ops);
      VUSE_OPS (stmt) = NULL;
    }

  /* If there are any operands, instantiate a VUSE operator for STMT.  */
  if (new_ops)
    {
      tree op;
      unsigned i;

      last = add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), NULL);

      for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
	SET_USE (VUSE_OP_PTR (last, (int) i), op);

      VUSE_OPS (stmt) = last;
      VEC_free (tree, heap, new_ops);
    }

#ifdef ENABLE_CHECKING
  {
    unsigned x;

    if (VUSE_OPS (stmt))
      {
	gcc_assert (VUSE_OPS (stmt)->next == NULL);
	x = VUSE_NUM (VUSE_OPS (stmt));
      }
    else
      x = 0;

    gcc_assert (x == VEC_length (tree, build_vuses));
  }
#endif
}
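/* A sketched example of the merge above: if the old VUSE vector held
   symbols with UIDs {3, 7} and build_vuses holds {3, 5}, then UID 3 is
   reused (keeping its SSA_NAME version), UID 5 is pushed as a new
   operand, UID 7 is dropped, and the old vector itself goes back to
   the free list.  */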
/* Return a new VUSE operand vector for STMT.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_vdefs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     VDEF operator for a variable 'a', then a VUSE for 'a' is not
     needed because VDEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is pointed-to by p and q:

	      # VUSE <a_2>
	      # a_3 = VDEF <a_2>
	      *p = *q;

     The VUSE <a_2> is superfluous because it is implied by the
     VDEF operator.  */
  num = VEC_length (tree, build_vuses);
  num_vdefs = VEC_length (tree, build_vdefs);

  if (num > 0 && num_vdefs > 0)
    for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
      {
	tree vuse;
	vuse = VEC_index (tree, build_vuses, vuse_index);
	if (TREE_CODE (vuse) != SSA_NAME)
	  {
	    var_ann_t ann = var_ann (vuse);
	    ann->in_vuse_list = 0;
	    if (ann->in_vdef_list)
	      {
		VEC_ordered_remove (tree, build_vuses, vuse_index);
		continue;
	      }
	  }
	vuse_index++;
      }

  finalize_ssa_vuse_ops (stmt);
}
/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

static inline void
cleanup_build_arrays (void)
{
  unsigned i;
  tree t;

  for (i = 0; VEC_iterate (tree, build_vdefs, i, t); i++)
    if (TREE_CODE (t) != SSA_NAME)
      var_ann (t)->in_vdef_list = false;

  for (i = 0; VEC_iterate (tree, build_vuses, i, t); i++)
    if (TREE_CODE (t) != SSA_NAME)
      var_ann (t)->in_vuse_list = false;

  VEC_truncate (tree, build_vdefs, 0);
  VEC_truncate (tree, build_vuses, 0);
  VEC_truncate (tree, build_defs, 0);
  VEC_truncate (tree, build_uses, 0);
  bitmap_clear (build_loads);
  bitmap_clear (build_stores);
}
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_vdefs (stmt);
  finalize_ssa_vuses (stmt);
  cleanup_build_arrays ();
}
/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_vdefs) == 0);
  gcc_assert (bitmap_empty_p (build_loads));
  gcc_assert (bitmap_empty_p (build_stores));
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}
/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  tree sym;

  if (TREE_CODE (var) != SSA_NAME)
    {
      tree mpt;
      var_ann_t ann;

      /* If VAR belongs to a memory partition, use it instead of VAR.  */
      mpt = memory_partition (var);
      if (mpt)
	var = mpt;

      /* Don't allow duplicate entries.  */
      ann = get_var_ann (var);
      if (ann->in_vdef_list)
	return;

      ann->in_vdef_list = true;
      sym = var;
    }
  else
    sym = SSA_NAME_VAR (var);

  VEC_safe_push (tree, heap, build_vdefs, var);
  bitmap_set_bit (build_stores, DECL_UID (sym));
}
/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  tree sym;

  if (TREE_CODE (var) != SSA_NAME)
    {
      tree mpt;
      var_ann_t ann;

      /* If VAR belongs to a memory partition, use it instead of VAR.  */
      mpt = memory_partition (var);
      if (mpt)
	var = mpt;

      /* Don't allow duplicate entries.  */
      ann = get_var_ann (var);
      if (ann->in_vuse_list || ann->in_vdef_list)
	return;

      ann->in_vuse_list = true;
      sym = var;
    }
  else
    sym = SSA_NAME_VAR (var);

  VEC_safe_push (tree, heap, build_vuses, var);
  bitmap_set_bit (build_loads, DECL_UID (sym));
}
/* REF is a tree that contains the entire pointer dereference
   expression, if available, or NULL otherwise.  ALIAS is the variable
   we are asking if REF can access.  OFFSET and SIZE come from the
   memory access expression that generated this virtual operand.  */

static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
			   HOST_WIDE_INT size)
{
  bool offsetgtz = offset > 0;
  unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
  tree base = ref ? get_base_address (ref) : NULL;

  /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
     using a call-clobbered memory tag.  By definition, call-clobbered
     memory tags can always touch .GLOBAL_VAR.  */
  if (alias == gimple_global_var (cfun))
    return true;

  /* If ALIAS is an SFT, it can't be touched if the offset
     and size of the access is not overlapping with the SFT offset and
     size.  This is only true if we are accessing through a pointer
     to a type that is the same as SFT_PARENT_VAR.  Otherwise, we may
     be accessing through a pointer to some substruct of the
     structure, and if we try to prune there, we will have the wrong
     offset, and get the wrong answer.
     i.e., we can't prune without more work if we have something like

     struct gcc_target
     {
       struct asm_out
       {
	 const char *byte_op;
	 struct asm_int_op
	 {
	   const char *hi;
	 } aligned_op;
       } asm_out;
     } targetm;

     foo = &targetm.asm_out.aligned_op;
     return foo->hi;

     SFT.1, which represents hi, will have SFT_OFFSET=32 because in
     terms of SFT_PARENT_VAR, that is where it is.
     However, the access through the foo pointer will be at offset 0.  */
  if (ref
      && TREE_CODE (alias) == STRUCT_FIELD_TAG
      && base
      && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
      && !overlap_subvar (offset, size, alias, NULL))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* With strict aliasing, it is impossible for a component access
     through a pointer to touch a random variable, unless that
     variable *is* a structure or a pointer.

     That is, given p->c, and some random global variable b,
     there is no legal way that p->c could be an access to b.

     Without strict aliasing on, we consider it legal to do something
     like:

     struct foos { int l; };
     int foo;
     static struct foos *getfoo(void);
     int main (void)
     {
       struct foos *f = getfoo();
       f->l = 1;
       foo = 2;
       if (f->l == 1)
	 abort();
       exit(0);
     }
     static struct foos *getfoo(void)
     { return (struct foos *)&foo; }

     (taken from 20000623-1.c)

     The docs also say/imply that access through union pointers
     is legal (but *not* if you take the address of the union member,
     i.e. the inverse), such that you can do

     typedef union {
       int d;
     } U;

     int rv;
     void breakme()
     {
       U *rv0;
       U *pretmp = (U*)&rv;
       rv0 = pretmp;
       rv0->d = 42;
     }

     To implement this, we just punt on accesses through union
     pointers entirely.  */
  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && (TREE_CODE (base) != INDIRECT_REF
	       || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
	   && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
	   && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
	   && !var_ann (alias)->is_heapvar
	   /* When the struct has may_alias attached to it, we need not to
	      care about it.  */
	   && get_alias_set (base))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* If the offset of the access is greater than the size of one of
     the possible aliases, it can't be touching that alias, because it
     would be past the end of the structure.  */
  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && !POINTER_TYPE_P (TREE_TYPE (alias))
	   && offsetgtz
	   && DECL_SIZE (alias)
	   && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
	   && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  return true;
}
/* Add VAR to the virtual operands array.  FLAGS is as in
   get_expr_operands.  FULL_REF is a tree that contains the entire
   pointer dereference expression, if available, or NULL otherwise.
   OFFSET and SIZE come from the memory access expression that
   generated this virtual operand.  */

static void
add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
		     tree full_ref, HOST_WIDE_INT offset,
		     HOST_WIDE_INT size)
{
  bitmap aliases = NULL;
  tree sym;
  var_ann_t v_ann;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark the statement as having memory operands.  */
  s_ann->references_memory = true;

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a VDEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  We only
     allow this suppression for implicit (non-specific) stores.

     Note that if this is a specific store, i.e. associated with a
     GIMPLE_MODIFY_STMT, then we can't suppress the VDEF, lest we run
     into validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_implicit) && unmodifiable_var_p (var))
    flags &= ~opf_def;

  /* The variable is not a GIMPLE register.  Add it (or its aliases) to
     virtual operands, unless the caller has specifically requested
     not to add virtual operands (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  if (MTAG_P (var))
    aliases = MTAG_ALIASES (var);
  if (aliases == NULL)
    {
      if (s_ann && !gimple_aliases_computed_p (cfun))
	s_ann->has_volatile_ops = true;
      /* The variable is not aliased or it is an alias tag.  */
      if (flags & opf_def)
	append_vdef (var);
      else
	append_vuse (var);
    }
  else
    {
      bitmap_iterator bi;
      unsigned int i;
      tree al;

      /* The variable is aliased.  Add its aliases to the virtual
	 operands.  */
      gcc_assert (!bitmap_empty_p (aliases));

      if (flags & opf_def)
	{
	  bool none_added = true;
	  EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
	    {
	      al = referenced_var (i);
	      if (!access_can_touch_variable (full_ref, al, offset, size))
		continue;

	      none_added = false;
	      append_vdef (al);
	    }

	  /* Even if no aliases have been added, we still need to
	     establish def-use and use-def chains, lest
	     transformations think that this is not a memory
	     reference.  For an example of this scenario, see
	     testsuite/g++.dg/opt/cleanup1.C.  */
	  if (none_added)
	    append_vdef (var);
	}
      else
	{
	  bool none_added = true;
	  EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
	    {
	      al = referenced_var (i);
	      if (!access_can_touch_variable (full_ref, al, offset, size))
		continue;
	      none_added = false;
	      append_vuse (al);
	    }

	  /* Even if no aliases have been added, we still need to
	     establish def-use and use-def chains, lest
	     transformations think that this is not a memory
	     reference.  For an example of this scenario, see
	     testsuite/g++.dg/opt/cleanup1.C.  */
	  if (none_added)
	    append_vuse (var);
	}
    }
}
/* Add *VAR_P to the appropriate operand array for S_ANN.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  tree var, sym;
  var_ann_t v_ann;

  gcc_assert (SSA_VAR_P (*var_p) && s_ann);

  var = *var_p;
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  */
  if (TREE_THIS_VOLATILE (sym))
    s_ann->has_volatile_ops = true;

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1);
}
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
      that got us here.

   FLAGS is as in get_expr_operands.

   FULL_REF contains the full pointer dereference expression, if we
      have it, or NULL otherwise.

   OFFSET and SIZE are the location of the access inside the
      dereferenced pointer, if known.

   RECURSE_ON_BASE should be set to true if we want to continue
      calling get_expr_operands on the base pointer, and false if
      something else will do it for us.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags, tree full_ref,
			   HOST_WIDE_INT offset, HOST_WIDE_INT size,
			   bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  s_ann->references_memory = true;
  if (s_ann && TREE_THIS_VOLATILE (expr))
    s_ann->has_volatile_ops = true;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_virtual_operand (pi->name_mem_tag, s_ann, flags,
			       full_ref, offset, size);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its symbol memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);

	  if (v_ann->symbol_mem_tag)
	    add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
				 full_ref, offset, size);
	  /* Aliasing information is missing; mark statement as volatile so we
	     won't optimize it out too actively.  */
	  else if (s_ann && !gimple_aliases_computed_p (cfun)
		   && (flags & opf_def))
	    s_ann->has_volatile_ops = true;
	}
    }
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      /* If a constant is used as a pointer, we can't generate a real
	 operand for it but we mark the statement volatile to prevent
	 optimizations from messing things up.  */
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }
  else
    {
      /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
      gcc_unreachable ();
    }

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr, opf_use);
}
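/* For example (a sketch): given "*p_1 = ..." where p_1 carries
   flow-sensitive points-to information, the virtual operand is added
   for p_1's name memory tag; otherwise the symbol memory tag of 'p' is
   used; and if neither is available the statement is marked volatile,
   as done above, so that it is not optimized too aggressively.  */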
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag, ref;
  HOST_WIDE_INT offset, size, maxsize;
  subvar_t svars, sv;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* This statement references memory.  */
  s_ann->references_memory = 1;

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);

  if (TMR_SYMBOL (expr))
    add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken);

  tag = TMR_TAG (expr);
  if (!tag)
    {
      /* Something weird, so ensure that we will be careful.  */
      s_ann->has_volatile_ops = true;
      return;
    }

  if (DECL_P (tag))
    {
      get_expr_operands (stmt, &tag, flags);
      return;
    }

  ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
  gcc_assert (ref != NULL_TREE);
  svars = get_subvars_for_var (ref);
  for (sv = svars; sv; sv = sv->next)
    {
      bool exact;
      if (overlap_subvar (offset, maxsize, sv->var, &exact))
	add_stmt_operand (&sv->var, s_ann, flags);
    }
}
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b, not_written_b;

  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_stmt_operand (&var, s_ann, opf_def);
      return;
    }

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */
  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;

  /* Add a VDEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var_lookup (u);
      unsigned int escape_mask = var_ann (var)->escape_mask;
      tree real_var = var;
      bool not_read;
      bool not_written;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b,
					    DECL_UID (real_var)) : false;
      not_written = not_written_b ? bitmap_bit_p (not_written_b,
						  DECL_UID (real_var)) : false;
      gcc_assert (!unmodifiable_var_p (var));

      clobber_stats.clobbered_vars++;

      /* See if this variable is really clobbered by this function.  */

      /* Trivial case: Things escaping only to pure/const are not
	 clobbered by non-pure-const, and only read by pure/const.  */
      if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
	{
	  tree call = get_call_expr_in (stmt);
	  if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
	    {
	      add_stmt_operand (&var, s_ann, opf_use);
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	  else
	    {
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	}

      if (not_written)
	{
	  clobber_stats.static_write_clobbers_avoided++;
	  if (!not_read)
	    add_stmt_operand (&var, s_ann, opf_use);
	  else
	    clobber_stats.static_read_clobbers_avoided++;
	}
      else
	add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1);
    }
}
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_stmt_operand (&var, s_ann, opf_use);
      return;
    }

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var (u);
      tree real_var = var;
      bool not_read;

      clobber_stats.readonly_clobbers++;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
			    : false;

      if (not_read)
	{
	  clobber_stats.static_readonly_clobbers_avoided++;
	  continue;
	}

      add_stmt_operand (&var, s_ann, opf_use | opf_implicit);
    }
}
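/* To illustrate the two routines above with a sketch: for a call to a
   function that may clobber memory, add_call_clobber_ops adds a VDEF
   for every call-clobbered variable not pruned by the not_read and
   not_written bitmaps; for a pure (but not const) call,
   add_call_read_ops adds only VUSEs for those variables.  */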
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);
  stmt_ann_t ann = stmt_ann (stmt);

  ann->references_memory = true;

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (gimple_aliases_computed_p (cfun)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt, get_callee_fndecl (expr));
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_use);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
}
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann;
  int i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  tree link;

  s_ann = stmt_ann (stmt);
  noutputs = list_length (ASM_OUTPUTS (stmt));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
    }

  /* Gather all input operands.  */
  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	s_ann->references_memory = true;

	EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
	  {
	    tree var = referenced_var (i);
	    add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
	  }

	EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
	  {
	    tree var = referenced_var (i);

	    /* Subvars are explicitly represented in this list, so we
	       don't need the original to be added to the clobber ops,
	       but the original *will* be in this list because we keep
	       the addressability of the original variable up-to-date
	       to avoid confusing the back-end.  */
	    if (var_can_have_subvars (var)
		&& get_subvars_for_var (var) != NULL)
	      continue;

	    add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
	  }
	break;
      }
}
/* Scan operands for the assignment expression EXPR in statement STMT.  */

static void
get_modify_stmt_operands (tree stmt, tree expr)
{
  /* First get operands from the RHS.  */
  get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_use);

  /* For the LHS, use a regular definition (opf_def) for GIMPLE
     registers.  If the LHS is a store to memory, we will need
     a preserving definition (VDEF).

     Preserving definitions are those that modify a part of an
     aggregate object for which no subvars have been computed (or the
     reference does not correspond exactly to one of them).  Stores
     through a pointer are also represented with VDEF operators.

     We used to distinguish between preserving and killing definitions.
     We always emit preserving definitions now.  */
  get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0), opf_def);
}
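/* For instance (a sketch): for "x = y + z" with x, y and z GIMPLE
   registers, the two calls above produce DEF(x), USE(y) and USE(z);
   for "*p = y" the LHS scan reaches get_indirect_ref_operands and
   produces VDEFs for whatever 'p' may point to instead of a real
   DEF.  */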
2022 /* Recursively scan the expression pointed to by EXPR_P in statement
2023 STMT. FLAGS is one of the OPF_* constants modifying how to
2024 interpret the operands found. */
2027 get_expr_operands (tree stmt
, tree
*expr_p
, int flags
)
2029 enum tree_code code
;
2030 enum tree_code_class
class;
2031 tree expr
= *expr_p
;
2032 stmt_ann_t s_ann
= stmt_ann (stmt
);
2037 code
= TREE_CODE (expr
);
2038 class = TREE_CODE_CLASS (code
);
2043 /* Taking the address of a variable does not represent a
2044 reference to it, but the fact that the statement takes its
2045 address will be of interest to some passes (e.g. alias
2047 add_to_addressable_set (TREE_OPERAND (expr
, 0), &s_ann
->addresses_taken
);
2049 /* If the address is invariant, there may be no interesting
2050 variable references inside. */
2051 if (is_gimple_min_invariant (expr
))
2054 /* Otherwise, there may be variables referenced inside but there
2055 should be no VUSEs created, since the referenced objects are
2056 not really accessed. The only operands that we should find
2057 here are ARRAY_REF indices which will always be real operands
2058 (GIMPLE does not allow non-registers as array indices). */
2059 flags
|= opf_no_vops
;
2060 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
);
2064 case STRUCT_FIELD_TAG
:
2065 case SYMBOL_MEMORY_TAG
:
2066 case NAME_MEMORY_TAG
:
2067 add_stmt_operand (expr_p
, s_ann
, flags
);
2076 /* Add the subvars for a variable, if it has subvars, to DEFS
2077 or USES. Otherwise, add the variable itself. Whether it
2078 goes to USES or DEFS depends on the operand flags. */
2079 if (var_can_have_subvars (expr
)
2080 && (svars
= get_subvars_for_var (expr
)))
2083 for (sv
= svars
; sv
; sv
= sv
->next
)
2084 add_stmt_operand (&sv
->var
, s_ann
, flags
);
2087 add_stmt_operand (expr_p
, s_ann
, flags
);
2092 case MISALIGNED_INDIRECT_REF
:
2093 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), flags
);
2096 case ALIGN_INDIRECT_REF
:
2098 get_indirect_ref_operands (stmt
, expr
, flags
, NULL_TREE
, 0, -1, true);
2101 case TARGET_MEM_REF
:
2102 get_tmr_operands (stmt
, expr
, flags
);
2106 case ARRAY_RANGE_REF
:
2112 HOST_WIDE_INT offset
, size
, maxsize
;
2115 /* This component reference becomes an access to all of the
2116 subvariables it can touch, if we can determine that, but
2117 *NOT* the real one. If we can't determine which fields we
2118 could touch, the recursion will eventually get to a
2119 variable and add *all* of its subvars, or whatever is the
2120 minimum correct subset. */
2121 ref
= get_ref_base_and_extent (expr
, &offset
, &size
, &maxsize
);
2122 if (SSA_VAR_P (ref
) && get_subvars_for_var (ref
))
2125 subvar_t svars
= get_subvars_for_var (ref
);
2127 for (sv
= svars
; sv
; sv
= sv
->next
)
2131 if (overlap_subvar (offset
, maxsize
, sv
->var
, &exact
))
2133 int subvar_flags
= flags
;
2135 add_stmt_operand (&sv
->var
, s_ann
, subvar_flags
);
2140 flags
|= opf_no_vops
;
2142 else if (TREE_CODE (ref
) == INDIRECT_REF
)
2144 get_indirect_ref_operands (stmt
, ref
, flags
, expr
, offset
,
2146 flags
|= opf_no_vops
;
2149 /* Even if we found subvars above we need to ensure to see
2150 immediate uses for d in s.a[d]. In case of s.a having
2151 a subvar or we would miss it otherwise. */
2152 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
);
2154 if (code
== COMPONENT_REF
)
2156 if (s_ann
&& TREE_THIS_VOLATILE (TREE_OPERAND (expr
, 1)))
2157 s_ann
->has_volatile_ops
= true;
2158 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_use
);
2160 else if (code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
2162 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), opf_use
);
2163 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_use
);
2164 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 3), opf_use
);
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
      return;
    case GIMPLE_MODIFY_STMT:
      get_modify_stmt_operands (stmt, expr);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	for (idx = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, opf_use);

	return;
      }
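      /* Sketch (editor's addition, hypothetical statement): for a vector
	 constructor such as

	     v_1 = {x_2, y_3, z_4, w_5};

	 each element value is scanned with opf_use, so x_2 through w_5
	 all become real USE operands of the statement.  */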
    case BIT_FIELD_REF:
    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
    case CONST_DECL:
    case OMP_PARALLEL:
    case OMP_SECTIONS:
    case OMP_FOR:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_RETURN:
    case OMP_CONTINUE:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
	goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
	goto do_binary;
      if (class == tcc_constant || class == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case GIMPLE_MODIFY_STMT:
      get_modify_stmt_operands (stmt, stmt);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_use);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_use);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_use);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_use);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_use);
      break;

    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      /* These nodes contain no variable references.  */
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the
	 operand pointer (which may only happen for USE operands), we
	 will fail in add_stmt_operand.  This default will handle
	 statements like empty statements, or CALL_EXPRs that may
	 appear on the RHS of a statement or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_use);
      break;
    }
}
/* Create an operands cache for STMT.  */

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands and
     makes no memory references.  */
  ann->has_volatile_ops = false;
  ann->references_memory = false;

  /* Just clear the bitmap so we don't end up reallocating it over and over.  */
  if (ann->addresses_taken)
    bitmap_clear (ann->addresses_taken);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  operand_build_sort_virtual (build_vuses);
  operand_build_sort_virtual (build_vdefs);
  finalize_ssa_stmt_operands (stmt);

  if (ann->addresses_taken && bitmap_empty_p (ann->addresses_taken))
    ann->addresses_taken = NULL;

  /* For added safety, assume that statements with volatile operands
     also reference memory.  */
  if (ann->has_volatile_ops)
    ann->references_memory = true;
}
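/* Editor's note, an illustrative trace of the cache-building pipeline
   above, assuming a hypothetical statement 'a.x = b_1 + c_2' that
   stores to aliased memory:

       start_ssa_stmt_operands ();        <- build_* vectors start empty
       parse_ssa_operands (stmt);         <- b_1 and c_2 appended as uses,
					     the symbol behind a.x as a VDEF
       operand_build_sort_virtual (...);  <- canonical order for the
					     virtual operand vectors
       finalize_ssa_stmt_operands (stmt); <- new vectors matched against
					     any previous operand cache  */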
/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->vdef_ops = NULL;
  ops->vuse_ops = NULL;
  BITMAP_FREE (ops->loads);
  BITMAP_FREE (ops->stores);
}
/* Get the operands of statement STMT.  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  timevar_push (TV_TREE_OPS);

  gcc_assert (ann->modified);
  build_ssa_operands (stmt);
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
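/* Editor's note, usage sketch: passes normally do not call
   update_stmt_operands directly.  The usual idiom after modifying a
   statement in place is

       ... change the statement ...
       update_stmt (stmt);

   which marks the statement modified and triggers the re-scan,
   funneling into build_ssa_operands above.  */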
/* Copies virtual operands from SRC to DEST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  unsigned int i, n;
  voptype_p src_vuses, dest_vuses;
  voptype_p src_vdefs, dest_vdefs;
  struct voptype_d vuse;
  struct voptype_d vdef;
  stmt_ann_t dest_ann;

  VDEF_OPS (dest) = NULL;
  VUSE_OPS (dest) = NULL;

  dest_ann = get_stmt_ann (dest);
  BITMAP_FREE (dest_ann->operands.loads);
  BITMAP_FREE (dest_ann->operands.stores);

  if (LOADED_SYMS (src))
    {
      dest_ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);
      bitmap_copy (dest_ann->operands.loads, LOADED_SYMS (src));
    }

  if (STORED_SYMS (src))
    {
      dest_ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);
      bitmap_copy (dest_ann->operands.stores, STORED_SYMS (src));
    }

  /* Copy all the VUSE operators and corresponding operands.  */
  dest_vuses = &vuse;
  for (src_vuses = VUSE_OPS (src); src_vuses; src_vuses = src_vuses->next)
    {
      n = VUSE_NUM (src_vuses);
      dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses);
      for (i = 0; i < n; i++)
	SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));

      if (VUSE_OPS (dest) == NULL)
	VUSE_OPS (dest) = vuse.next;
    }

  /* Copy all the VDEF operators and corresponding operands.  */
  dest_vdefs = &vdef;
  for (src_vdefs = VDEF_OPS (src); src_vdefs; src_vdefs = src_vdefs->next)
    {
      n = VUSE_NUM (src_vdefs);
      dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs);
      VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs);
      for (i = 0; i < n; i++)
	SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));

      if (VDEF_OPS (dest) == NULL)
	VDEF_OPS (dest) = vdef.next;
    }
}
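/* Editor's note: the local 'vuse' and 'vdef' structs above act as
   dummy list heads.  add_vuse_op/add_vdef_op chain each new operator
   after the node they are handed, so after the first iteration
   'vuse.next' points at the real head of the copied list, which is
   then installed with VUSE_OPS (dest) = vuse.next (and likewise for
   the VDEF list).  */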
/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this
   can be used to eliminate redundant loads.  OLD_OPS are the operands
   from the store stmt, and NEW_STMT is the new load which represents a
   load of the values stored.  */

void
create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt)
{
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned i;

  get_stmt_ann (new_stmt);

  /* Process NEW_STMT looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
    if (TREE_CODE (op) != SSA_NAME)
      var_ann (op)->in_vuse_list = false;

  for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
    if (TREE_CODE (op) != SSA_NAME)
      var_ann (op)->in_vdef_list = false;

  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_vdefs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the VDEF result operand on the new statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
    append_vuse (op);

  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}
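/* Editor's note, usage sketch with hypothetical statements: given a store

       # a_5 = VDEF <a_4>
       a = b_3;

   DOM can build a separate statement 'tmp = a', call
   create_ssa_artificial_load_stmt on it with the store as OLD_STMT, and
   the fake load receives VUSE <a_5>; a later real load of 'a' can then
   be recognized as redundant with the stored value.  */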
/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */

void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* If both uses don't have operand entries, there isn't much we can do
	 at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
	{
	  tree *tmp = USE_OP_PTR (use1)->use;
	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
	  USE_OP_PTR (use0)->use = tmp;
	}
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
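/* Editor's note, usage sketch: a canonicalization pass that wants
   'b_2 + a_1' rewritten as 'a_1 + b_2' could call

       swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
			   &TREE_OPERAND (rhs, 1));

   (where 'rhs' stands in for the commutative expression); the
   immediate-use links of both names keep pointing at valid operand
   slots afterwards.  */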
/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
   a single variable whose address has been taken or any other valid
   GIMPLE memory reference (structure reference, array, etc).  If the
   base address of REF is a decl that has sub-variables, also add all
   of its sub-variables.  */

void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
  tree var;
  subvar_t svars;

  gcc_assert (addresses_taken);

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*addresses_taken == NULL)
	*addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
	  && (svars = get_subvars_for_var (var)))
	{
	  subvar_t sv;
	  for (sv = svars; sv; sv = sv->next)
	    {
	      bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
	      TREE_ADDRESSABLE (sv->var) = 1;
	    }
	}
      else
	{
	  bitmap_set_bit (*addresses_taken, DECL_UID (var));
	  TREE_ADDRESSABLE (var) = 1;
	}
    }
}
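/* Editor's note: as the PR 21407 reference above implies, for a
   hypothetical 'p_1 = &s.f' it is the whole of 's' (or all of its
   subvars) that is marked addressable here, not just the field 'f',
   because pointer arithmetic on p_1 may reach the rest of 's'.  */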
/* Scan the immediate_use list for VAR making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->stmt == NULL && use_p->use == NULL)
	fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS|TDF_MEMSYMS);
      else
	print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }
  fprintf (file, "\n");
}
/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
	continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}
/* Create a new change buffer for the statement pointed to by STMT_P and
   push the buffer into SCB_STACK.  Each change buffer
   records state information needed to determine what changed in the
   statement.  Mainly, this keeps track of symbols that may need to be
   put into SSA form, SSA name replacements and other information
   needed to keep the SSA form up to date.  */

void
push_stmt_changes (tree *stmt_p)
{
  tree stmt;
  scb_t buf;

  stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  buf = xmalloc (sizeof *buf);
  memset (buf, 0, sizeof *buf);

  buf->stmt_p = stmt_p;

  if (stmt_references_memory_p (stmt))
    {
      tree op;
      ssa_op_iter i;

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
	{
	  tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
	  if (buf->loads == NULL)
	    buf->loads = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (buf->loads, DECL_UID (sym));
	}

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
	{
	  tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
	  if (buf->stores == NULL)
	    buf->stores = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (buf->stores, DECL_UID (sym));
	}
    }

  VEC_safe_push (scb_t, heap, scb_stack, buf);
}
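/* Editor's note, usage sketch of the change-buffer protocol defined
   here and below:

       push_stmt_changes (&stmt);
       ... transform the statement in place ...
       if (something_changed)
	 pop_stmt_changes (&stmt);      <- re-scan, mark for renaming
       else
	 discard_stmt_changes (&stmt);  <- cheap unwind, no re-scan

   'something_changed' is a stand-in for whatever test the caller uses.  */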
/* Given two sets S1 and S2, mark the symbols that differ in S1 and S2
   for renaming.  The set to mark for renaming is (S1 & ~S2) | (S2 & ~S1).  */

static void
mark_difference_for_renaming (bitmap s1, bitmap s2)
{
  if (s1 == NULL && s2 == NULL)
    return;

  if (s1 && s2 == NULL)
    mark_set_for_renaming (s1);
  else if (s1 == NULL && s2)
    mark_set_for_renaming (s2);
  else if (!bitmap_equal_p (s1, s2))
    {
      bitmap t1 = BITMAP_ALLOC (NULL);
      bitmap t2 = BITMAP_ALLOC (NULL);

      bitmap_and_compl (t1, s1, s2);
      bitmap_and_compl (t2, s2, s1);
      bitmap_ior_into (t1, t2);
      mark_set_for_renaming (t1);

      BITMAP_FREE (t1);
      BITMAP_FREE (t2);
    }
}
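/* Editor's note, worked example: for S1 = {a, b} and S2 = {b, c},
   (S1 & ~S2) | (S2 & ~S1) = {a} | {c} = {a, c}, so 'a' and 'c' are
   marked for renaming while the unchanged symbol 'b' is left alone.  */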
/* Pop the top SCB from SCB_STACK and act on the differences between
   what was recorded by push_stmt_changes and the current state of
   the statement.  */

void
pop_stmt_changes (tree *stmt_p)
{
  tree op, stmt;
  ssa_op_iter iter;
  bitmap loads, stores;
  scb_t buf;

  stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  buf = VEC_pop (scb_t, scb_stack);
  gcc_assert (stmt_p == buf->stmt_p);

  /* Force an operand re-scan on the statement and mark any newly
     exposed variables.  */
  update_stmt (stmt);

  /* Determine whether any memory symbols need to be renamed.  If the
     sets of loads and stores are different after the statement is
     modified, then the affected symbols need to be renamed.

     Note that it may be possible for the statement to not reference
     memory anymore, but we still need to act on the differences in
     the sets of symbols.  */
  loads = stores = NULL;
  if (stmt_references_memory_p (stmt))
    {
      tree op;
      ssa_op_iter i;

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
	{
	  tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
	  if (loads == NULL)
	    loads = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (loads, DECL_UID (sym));
	}

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
	{
	  tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
	  if (stores == NULL)
	    stores = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (stores, DECL_UID (sym));
	}
    }

  /* If LOADS is different from BUF->LOADS, the affected
     symbols need to be marked for renaming.  */
  mark_difference_for_renaming (loads, buf->loads);

  /* Similarly for STORES and BUF->STORES.  */
  mark_difference_for_renaming (stores, buf->stores);

  /* Mark all the naked GIMPLE register operands for renaming.  */
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
    if (DECL_P (op))
      mark_sym_for_renaming (op);

  /* FIXME, need to add more finalizers here.  Cleanup EH info,
     recompute invariants for address expressions, add
     SSA replacement mappings, etc.  For instance, given
     testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of
     the form:

	 # SMT.4_20 = VDEF <SMT.4_16>
	 D.1576_11 = 1.0e+0;

     So, the VDEF will disappear, but instead of marking SMT.4 for
     renaming it would be far more efficient to establish a
     replacement mapping that would replace every reference of
     SMT.4_20 with SMT.4_16.  */

  /* Free memory used by the buffer.  */
  BITMAP_FREE (buf->loads);
  BITMAP_FREE (buf->stores);
  BITMAP_FREE (loads);
  BITMAP_FREE (stores);
  buf->stmt_p = NULL;
  free (buf);
}
/* Discard the topmost change buffer from SCB_STACK.  This is useful
   when the caller realizes that it did not actually modify the
   statement.  It avoids the expensive operand re-scan.  */

void
discard_stmt_changes (tree *stmt_p)
{
  scb_t buf;
  tree stmt;

  /* It makes no sense to keep track of PHI nodes.  */
  stmt = *stmt_p;
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  buf = VEC_pop (scb_t, scb_stack);
  gcc_assert (stmt_p == buf->stmt_p);

  /* Free memory used by the buffer.  */
  BITMAP_FREE (buf->loads);
  BITMAP_FREE (buf->stores);
  buf->stmt_p = NULL;
  free (buf);
}
/* Returns true if statement STMT may access memory.  */

bool
stmt_references_memory_p (tree stmt)
{
  if (!gimple_ssa_operands (cfun)->ops_active || TREE_CODE (stmt) == PHI_NODE)
    return false;

  return stmt_ann (stmt)->references_memory;
}
/* Return the memory partition tag (MPT) associated with memory
   symbol SYM.  From a correctness standpoint, memory partitions can
   be assigned in any arbitrary fashion as long as this rule is
   observed: Given two memory partitions MPT.i and MPT.j, they must
   not contain symbols in common.

   Memory partitions are used when putting the program into Memory-SSA
   form.  In particular, in Memory-SSA PHI nodes are not computed for
   individual memory symbols.  They are computed for memory
   partitions.  This reduces the number of PHI nodes in the SSA graph
   at the expense of precision (i.e., it makes unrelated stores affect
   each other).

   However, it is possible to increase precision by changing this
   partitioning scheme.  For instance, if the partitioning scheme is
   such that get_mpt_for is the identity function (that is,
   get_mpt_for (s) = s), this will result in ultimate precision at the
   expense of huge SSA webs.

   At the other extreme, a partitioning scheme that groups all the
   symbols in the same set results in minimal SSA webs and almost
   total loss of precision.  */

tree
get_mpt_for (tree sym)
{
  tree mpt;

  /* Don't create a new tag unnecessarily.  */
  mpt = memory_partition (sym);
  if (mpt == NULL_TREE)
    {
      mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
      TREE_ADDRESSABLE (mpt) = 0;
      MTAG_GLOBAL (mpt) = 1;
      add_referenced_var (mpt);
      VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
      MPT_SYMBOLS (mpt) = BITMAP_ALLOC (&operands_bitmap_obstack);
      set_memory_partition (sym, mpt);
    }

  return mpt;
}
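/* Editor's note, illustrative example: if MPT.1 = {a, b}, a store to
   'a' produces a VDEF of MPT.1 and a later load of 'b' carries a VUSE
   of MPT.1, so the two appear dependent even though they never touch
   the same storage; that is the precision loss described above.  */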
/* Dump memory partition information to FILE.  */

static void
dump_memory_partitions (FILE *file)
{
  unsigned i, npart;
  unsigned long nsyms;
  tree mpt;

  fprintf (file, "\nMemory partitions\n\n");
  for (i = 0, npart = 0, nsyms = 0;
       VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
       i++)
    {
      if (mpt)
	{
	  bitmap syms = MPT_SYMBOLS (mpt);
	  unsigned long n = bitmap_count_bits (syms);

	  fprintf (file, "#%u: ", i);
	  print_generic_expr (file, mpt, 0);
	  fprintf (file, ": %lu elements: ", n);
	  dump_decl_set (file, syms);
	  npart++;
	  nsyms += n;
	}
    }

  fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
}


/* Dump memory partition information to stderr.  */

void
debug_memory_partitions (void)
{
  dump_memory_partitions (stderr);
}