/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The stmt tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: defs, uses, virtual uses, and virtual defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   That is, if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
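
/* As an illustrative sketch, rebuilding the cache for a single statement
   goes through three phases (this is what build_ssa_operands, below,
   does):

     start_ssa_stmt_operands ();          -- verify the build arrays are empty
     parse_ssa_operands (stmt);           -- fill the build_* vectors
     finalize_ssa_stmt_operands (stmt);   -- match against the old cache  */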
/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use         0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def         (1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops     (1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of GIMPLE_MODIFY_STMT from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit    (1 << 2)
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Set for building all the VDEF operands.  */
static VEC(tree,heap) *build_vdefs;

/* Set for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;

/* Set for building all the loaded symbols.  */
static bitmap build_loads;

/* Set for building all the stored symbols.  */
static bitmap build_stores;
static void get_expr_operands (tree, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;
/* Statement change buffer.  Data structure used to record state
   information for statements.  This is used to determine what needs
   to be done in order to update the SSA web after a statement is
   modified by a pass.  If STMT is a statement that has just been
   created, or needs to be folded via fold_stmt, or anything that
   changes its physical structure, then the pass should:

   1- Call push_stmt_changes (&stmt) to record the current state of
      STMT before any modifications are made.

   2- Make all appropriate modifications to the statement.

   3- Call pop_stmt_changes (&stmt) to find new symbols that
      need to be put in SSA form, SSA name mappings for names that
      have disappeared, recompute invariantness for address
      expressions, cleanup EH information, etc.

   If it is possible to determine that the statement was not modified,
   instead of calling pop_stmt_changes it is quicker to call
   discard_stmt_changes to avoid the expensive and unnecessary operand
   re-scan and change comparison.  */
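
/* As an illustrative sketch, the protocol described above looks like
   this in a hypothetical pass (STMT_REALLY_CHANGED stands for whatever
   test the pass uses to detect that a modification actually happened):

     push_stmt_changes (&stmt);
     ... transform stmt ...
     if (STMT_REALLY_CHANGED)
       pop_stmt_changes (&stmt);
     else
       discard_stmt_changes (&stmt);  */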
struct scb_d
{
  /* Pointer to the statement being modified.  */
  tree *stmt_p;

  /* If the statement references memory these are the sets of symbols
     loaded and stored by the statement.  */
  bitmap loads;
  bitmap stores;
};

typedef struct scb_d *scb_t;
DEF_VEC_P(scb_t);
DEF_VEC_ALLOC_P(scb_t,heap);
/* Stack of statement change buffers (SCB).  Every call to
   push_stmt_changes pushes a new buffer onto the stack.  Calls to
   pop_stmt_changes pop a buffer off of the stack and compute the set
   of changes for the popped statement.  */
static VEC(scb_t,heap) *scb_stack;
/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}
/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *)p);
  tree e2 = *((const tree *)q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}
/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);

  if (num < 2)
    return;

  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
          > get_name_decl (VEC_index (tree, list, 1)))
        {
          /* Swap elements if in the wrong order.  */
          tree tmp = VEC_index (tree, list, 0);
          VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
          VEC_replace (tree, list, 1, tmp);
        }
      return;
    }

  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
         VEC_length (tree, list),
         sizeof (tree),
         operand_build_cmp);
}
/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}
/* VOPs are of variable size, so the free list maps "free buckets" to
   operand counts: buckets 0 through 15 hold VOPs of exactly 1 through 16
   operands, and each later bucket covers an 8-operand-wide range
   (17-24, 25-32, ..., up to 256).  Any VOPs larger than this are simply
   added to the largest bucket when they are freed.  */

/* Return the number of operands used in bucket BUCKET.  */

static inline int
vop_free_bucket_size (int bucket)
{
#ifdef ENABLE_CHECKING
  gcc_assert (bucket >= 0 && bucket < NUM_VOP_FREE_BUCKETS);
#endif
  if (bucket < 16)
    return bucket + 1;
  return (bucket - 13) * 8;
}
/* For a vop of NUM operands, return the bucket NUM belongs to.  If NUM is
   beyond the end of the bucket table, return -1.  */

static inline int
vop_free_bucket_index (int num)
{
  gcc_assert (num > 0);

  /* Sizes 1 through 16 use buckets 0-15.  */
  if (num <= 16)
    return num - 1;
  /* Buckets 16 - 45 represent 17 through 256 in 8 unit chunks.  */
  if (num <= 256)
    return 14 + (num - 1) / 8;
  return -1;
}
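
/* For example, under this scheme a VOP with 5 operands maps to bucket 4
   (sizes 1 through 16 map to buckets 0 through 15), while a VOP with 20
   operands maps to bucket 14 + (20 - 1) / 8 == 16, whose entries hold
   vop_free_bucket_size (16) == (16 - 13) * 8 == 24 operands.  */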
/* Initialize the VOP free buckets.  */

static inline void
init_vop_buckets (void)
{
  int x;

  for (x = 0; x < NUM_VOP_FREE_BUCKETS; x++)
    gimple_ssa_operands (cfun)->vop_free_buckets[x] = NULL;
}
/* Add PTR to the appropriate VOP bucket.  */

static inline void
add_vop_to_freelist (voptype_p ptr)
{
  int bucket = vop_free_bucket_index (VUSE_VECT_NUM_ELEM (ptr->usev));

  /* Too large, use the largest bucket so it's not a complete throw away.  */
  if (bucket == -1)
    bucket = NUM_VOP_FREE_BUCKETS - 1;

  ptr->next = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
  gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = ptr;
}
/* These are the sizes of the operand memory buffer which gets allocated each
   time more operands space is required.  The final value is the amount that is
   allocated every time after that.  */

#define OP_SIZE_INIT    0
#define OP_SIZE_1       30
#define OP_SIZE_2       110
#define OP_SIZE_3       511

/* Current size of the operand memory buffer.  */
static unsigned int ssa_operand_mem_size;
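
/* The allocator in ssa_operand_alloc below grows ssa_operand_mem_size
   from OP_SIZE_1 to OP_SIZE_2 to OP_SIZE_3 units of struct voptype_d on
   successive refills, then stays at OP_SIZE_3.  */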
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuses = VEC_alloc (tree, heap, 25);
      build_vdefs = VEC_alloc (tree, heap, 25);
      build_loads = BITMAP_ALLOC (NULL);
      build_stores = BITMAP_ALLOC (NULL);
      scb_stack = VEC_alloc (scb_t, heap, 20);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index = ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  init_vop_buckets ();
  ssa_operand_mem_size = OP_SIZE_INIT;
}
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  unsigned ix;
  tree mpt;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      VEC_free (tree, heap, build_vdefs);
      VEC_free (tree, heap, build_vuses);
      BITMAP_FREE (build_loads);
      BITMAP_FREE (build_stores);

      /* The change buffer stack had better be empty.  */
      gcc_assert (VEC_length (scb_t, scb_stack) == 0);
      VEC_free (scb_t, heap, scb_stack);
      scb_stack = NULL;
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
        = gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  for (ix = 0;
       VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt);
       ix++)
    if (mpt)
      BITMAP_FREE (MPT_SYMBOLS (mpt));

  VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table);

  gimple_ssa_operands (cfun)->ops_active = false;

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars: %d\n",
               clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided: %d\n",
               clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided: %d\n",
               clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
               clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers: %d\n",
               clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
               clobber_stats.static_readonly_clobbers_avoided);
    }
}
/* Return memory for operands of SIZE chunks.  */

static void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      if (ssa_operand_mem_size == OP_SIZE_INIT)
        ssa_operand_mem_size = OP_SIZE_1 * sizeof (struct voptype_d);
      else if (ssa_operand_mem_size == OP_SIZE_1 * sizeof (struct voptype_d))
        ssa_operand_mem_size = OP_SIZE_2 * sizeof (struct voptype_d);
      else
        ssa_operand_mem_size = OP_SIZE_3 * sizeof (struct voptype_d);

      /* Go right to the maximum size if the request is too large.  */
      if (size > ssa_operand_mem_size)
        ssa_operand_mem_size = OP_SIZE_3 * sizeof (struct voptype_d);

      /* Fail if there is not enough space.  If there are this many operands
         required, first make sure there isn't a different problem causing this
         many operands.  If the decision is that this is OK, then we can
         specially allocate a buffer just for this request.  */
      gcc_assert (size <= ssa_operand_mem_size);

      ptr = (struct ssa_operand_memory_d *)
            ggc_alloc (sizeof (struct ssa_operand_memory_d)
                       + ssa_operand_mem_size - 1);
      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }
  ptr = &(gimple_ssa_operands (cfun)->operand_memory
          ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}
/* Allocate a DEF operand.  */

static inline struct def_optype_d *
alloc_def (void)
{
  struct def_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_defs)
    {
      ret = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs
        = gimple_ssa_operands (cfun)->free_defs->next;
    }
  else
    ret = (struct def_optype_d *)
          ssa_operand_alloc (sizeof (struct def_optype_d));
  return ret;
}
/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
        = gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
          ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}
/* Allocate a vop with NUM elements.  */

static inline struct voptype_d *
alloc_vop (int num)
{
  struct voptype_d *ret = NULL;
  int alloc_size = 0;

  int bucket = vop_free_bucket_index (num);
  if (bucket != -1)
    {
      /* If there is a free operand, use it.  */
      if (gimple_ssa_operands (cfun)->vop_free_buckets[bucket] != NULL)
        {
          ret = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
          gimple_ssa_operands (cfun)->vop_free_buckets[bucket] =
            gimple_ssa_operands (cfun)->vop_free_buckets[bucket]->next;
        }
      else
        alloc_size = vop_free_bucket_size (bucket);
    }
  else
    alloc_size = num;

  if (alloc_size > 0)
    ret = (struct voptype_d *)ssa_operand_alloc (
        sizeof (struct voptype_d) + (alloc_size - 1) * sizeof (vuse_element_t));

  VUSE_VECT_NUM_ELEM (ret->usev) = num;
  return ret;
}
/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* fold_stmt may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}
/* Adds OP to the list of defs after LAST.  */

static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
  def_optype_p new;

  new = alloc_def ();
  DEF_OP_PTR (new) = op;
  last->next = new;
  new->next = NULL;
  return new;
}
/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (tree stmt, tree *op, use_optype_p last)
{
  use_optype_p new;

  new = alloc_use ();
  USE_OP_PTR (new)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new), *op, stmt);
  last->next = new;
  new->next = NULL;
  return new;
}
/* Return a virtual op pointer with NUM elements which are all initialized to OP
   and are linked into the immediate uses for STMT.  The new vop is appended
   after PREV.  */

static inline voptype_p
add_vop (tree stmt, tree op, int num, voptype_p prev)
{
  voptype_p new;
  int x;

  new = alloc_vop (num);
  for (x = 0; x < num; x++)
    {
      VUSE_OP_PTR (new, x)->prev = NULL;
      SET_VUSE_OP (new, x, op);
      VUSE_OP_PTR (new, x)->use = &new->usev.uses[x].use_var;
      link_imm_use_stmt (VUSE_OP_PTR (new, x), new->usev.uses[x].use_var,
                         stmt);
    }

  if (prev)
    prev->next = new;
  new->next = NULL;
  return new;
}
/* Adds OP to the list of vuses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline voptype_p
add_vuse_op (tree stmt, tree op, int num, voptype_p last)
{
  voptype_p new = add_vop (stmt, op, num, last);
  VDEF_RESULT (new) = NULL_TREE;
  return new;
}
/* Adds OP to the list of vdefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline voptype_p
add_vdef_op (tree stmt, tree op, int num, voptype_p last)
{
  voptype_p new = add_vop (stmt, op, num, last);
  VDEF_RESULT (new) = op;
  return new;
}
/* Reallocate the virtual operand PTR so that it has NUM_ELEM use slots.  ROOT
   is the head of the operand list it belongs to.  */

static inline struct voptype_d *
realloc_vop (struct voptype_d *ptr, int num_elem, struct voptype_d **root)
{
  int x, lim;
  tree stmt, val;
  struct voptype_d *ret, *tmp;

  if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
    return ptr;

  val = VUSE_OP (ptr, 0);
  if (TREE_CODE (val) == SSA_NAME)
    val = SSA_NAME_VAR (val);

  stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));

  /* Delink all the existing uses.  */
  for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
    {
      use_operand_p use_p = VUSE_OP_PTR (ptr, x);
      delink_imm_use (use_p);
    }

  /* If we want less space, simply use this one, and shrink the size.  */
  if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem)
    {
      VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem;
      return ptr;
    }

  /* It is growing.  Allocate a new one and replace the old one.  */
  ret = add_vuse_op (stmt, val, num_elem, ptr);

  /* Clear PTR and add its memory to the free list.  Keep the element
     count so the free list puts it in the right bucket.  */
  lim = VUSE_VECT_NUM_ELEM (ptr->usev);
  memset (ptr, 0,
          sizeof (struct voptype_d) + sizeof (vuse_element_t) * (lim - 1));
  VUSE_VECT_NUM_ELEM (ptr->usev) = lim;
  add_vop_to_freelist (ptr);

  /* Now simply remove the old one.  */
  if (*root == ptr)
    {
      *root = ret;
      return ret;
    }
  else
    {
      for (tmp = *root;
           tmp != NULL && tmp->next != ptr;
           tmp = tmp->next)
        ;
      if (tmp != NULL)
        {
          tmp->next = ret;
          return ret;
        }
    }

  /* The pointer passed in isn't in STMT's VDEF lists.  */
  gcc_unreachable ();
}
/* Reallocate the PTR vdef so that it has NUM_ELEM use slots.  */

struct voptype_d *
realloc_vdef (struct voptype_d *ptr, int num_elem)
{
  tree val, stmt;
  struct voptype_d *ret;

  val = VDEF_RESULT (ptr);
  stmt = USE_STMT (VDEF_OP_PTR (ptr, 0));
  ret = realloc_vop (ptr, num_elem, &(VDEF_OPS (stmt)));
  VDEF_RESULT (ret) = val;
  return ret;
}
/* Reallocate the PTR vuse so that it has NUM_ELEM use slots.  */

struct voptype_d *
realloc_vuse (struct voptype_d *ptr, int num_elem)
{
  tree stmt;
  struct voptype_d *ret;

  stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
  ret = realloc_vop (ptr, num_elem, &(VUSE_OPS (stmt)));
  return ret;
}
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (tree stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);

  new_list.next = NULL;
  last = &new_list;

  old_ops = DEF_OPS (stmt);

  new_i = 0;

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  DEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    def_optype_p ptr;
    unsigned x = 0;
    for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == num);
  }
#endif
}
/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (tree stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to update_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
  }
#endif

  new_list.next = NULL;
  last = &new_list;

  old_ops = USE_OPS (stmt);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
                       (tree *) VEC_index (tree, build_uses, new_i),
                       last);

  /* Now set the stmt's operands.  */
  USE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_uses));
  }
#endif
}
/* Takes elements from BUILD_VDEFS and turns them into vdef operands of
   STMT.  FIXME, for now VDEF operators should have a single operand
   in their RHS.  */

static inline void
finalize_ssa_vdefs (tree stmt)
{
  unsigned new_i;
  struct voptype_d new_list;
  voptype_p old_ops, ptr, last;
  stmt_ann_t ann = stmt_ann (stmt);

  /* Set the symbols referenced by STMT.  */
  if (!bitmap_empty_p (build_stores))
    {
      if (ann->operands.stores == NULL)
        ann->operands.stores = BITMAP_ALLOC (NULL);

      bitmap_copy (ann->operands.stores, build_stores);
    }
  else
    BITMAP_FREE (ann->operands.stores);

  /* If aliases have not been computed, do not instantiate a virtual
     operator on STMT.  Initially, we only compute the SSA form on
     GIMPLE registers.  The virtual SSA form is only computed after
     alias analysis, so virtual operators will remain unrenamed and
     the verifier will complain.  However, alias analysis needs to
     access symbol load/store information, so we need to compute
     those.  */
  if (!gimple_aliases_computed_p (cfun))
    return;

  new_list.next = NULL;
  last = &new_list;

  old_ops = VDEF_OPS (stmt);
  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_vdefs))
    {
      tree op = VEC_index (tree, build_vdefs, new_i);
      unsigned new_uid = get_name_decl (op);
      unsigned old_uid = get_name_decl (VDEF_RESULT (old_ops));

      /* FIXME, for now each VDEF operator should have at most one
         operand in their RHS.  */
      gcc_assert (VDEF_NUM (old_ops) == 1);

      if (old_uid == new_uid)
        {
          /* If the symbols are the same, reuse the existing operand.  */
          last->next = old_ops;
          last = old_ops;
          old_ops = old_ops->next;
          last->next = NULL;
          set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt);
          new_i++;
        }
      else if (old_uid < new_uid)
        {
          /* If old is less than new, old goes to the free list.  */
          voptype_p next;
          delink_imm_use (VDEF_OP_PTR (old_ops, 0));
          next = old_ops->next;
          add_vop_to_freelist (old_ops);
          old_ops = next;
        }
      else
        {
          /* This is a new operand.  */
          last = add_vdef_op (stmt, op, 1, last);
          new_i++;
        }
    }

  /* If there is anything remaining in BUILD_VDEFS, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
    last = add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, last);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = last)
        {
          last = ptr->next;
          delink_imm_use (VDEF_OP_PTR (ptr, 0));
          add_vop_to_freelist (ptr);
        }
    }

  /* Now set STMT's operands.  */
  VDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = VDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_vdefs));
  }
#endif
}
/* Takes elements from BUILD_VUSES and turns them into VUSE operands of
   STMT.  */

static inline void
finalize_ssa_vuse_ops (tree stmt)
{
  unsigned new_i, old_i;
  voptype_p old_ops, last;
  VEC(tree,heap) *new_ops;
  stmt_ann_t ann;

  /* Set the symbols referenced by STMT.  */
  ann = stmt_ann (stmt);
  if (!bitmap_empty_p (build_loads))
    {
      if (ann->operands.loads == NULL)
        ann->operands.loads = BITMAP_ALLOC (NULL);

      bitmap_copy (ann->operands.loads, build_loads);
    }
  else
    BITMAP_FREE (ann->operands.loads);

  /* If aliases have not been computed, do not instantiate a virtual
     operator on STMT.  Initially, we only compute the SSA form on
     GIMPLE registers.  The virtual SSA form is only computed after
     alias analysis, so virtual operators will remain unrenamed and
     the verifier will complain.  However, alias analysis needs to
     access symbol load/store information, so we need to compute
     those.  */
  if (!gimple_aliases_computed_p (cfun))
    return;

  /* STMT should have at most one VUSE operator.  */
  old_ops = VUSE_OPS (stmt);
  gcc_assert (old_ops == NULL || old_ops->next == NULL);

  new_ops = NULL;
  new_i = old_i = 0;
  while (old_ops
         && old_i < VUSE_NUM (old_ops)
         && new_i < VEC_length (tree, build_vuses))
    {
      tree new_op = VEC_index (tree, build_vuses, new_i);
      tree old_op = VUSE_OP (old_ops, old_i);
      unsigned new_uid = get_name_decl (new_op);
      unsigned old_uid = get_name_decl (old_op);

      if (old_uid == new_uid)
        {
          /* If the symbols are the same, reuse the existing operand.  */
          VEC_safe_push (tree, heap, new_ops, old_op);
          new_i++;
          old_i++;
        }
      else if (old_uid < new_uid)
        {
          /* If OLD_UID is less than NEW_UID, the old operand has
             disappeared, skip to the next old operand.  */
          old_i++;
        }
      else
        {
          /* This is a new operand.  */
          VEC_safe_push (tree, heap, new_ops, new_op);
          new_i++;
        }
    }

  /* If there is anything remaining in the build_vuses list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
    VEC_safe_push (tree, heap, new_ops, VEC_index (tree, build_vuses, new_i));

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
        delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
      add_vop_to_freelist (old_ops);
      VUSE_OPS (stmt) = NULL;
    }

  /* If there are any operands, instantiate a VUSE operator for STMT.  */
  if (new_ops)
    {
      tree op;
      unsigned i;

      last = add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), NULL);

      for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
        SET_USE (VUSE_OP_PTR (last, (int) i), op);

      VUSE_OPS (stmt) = last;
      VEC_free (tree, heap, new_ops);
    }

#ifdef ENABLE_CHECKING
  {
    unsigned x;

    if (VUSE_OPS (stmt))
      {
        gcc_assert (VUSE_OPS (stmt)->next == NULL);
        x = VUSE_NUM (VUSE_OPS (stmt));
      }
    else
      x = 0;

    gcc_assert (x == VEC_length (tree, build_vuses));
  }
#endif
}
/* Return a new VUSE operand vector for STMT.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_vdefs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     VDEF operator for a variable 'a', then a VUSE for 'a' is not
     needed because VDEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is pointed-to by p and q:

              # VUSE <a_2>
              # a_3 = VDEF <a_2>
              *p = *q;

     The VUSE <a_2> is superfluous because it is implied by the
     VDEF operator.  */
  num = VEC_length (tree, build_vuses);
  num_vdefs = VEC_length (tree, build_vdefs);

  if (num > 0 && num_vdefs > 0)
    for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
      {
        tree vuse;
        vuse = VEC_index (tree, build_vuses, vuse_index);
        if (TREE_CODE (vuse) != SSA_NAME)
          {
            var_ann_t ann = var_ann (vuse);
            ann->in_vuse_list = 0;
            if (ann->in_vdef_list)
              {
                VEC_ordered_remove (tree, build_vuses, vuse_index);
                continue;
              }
          }
        vuse_index++;
      }

  finalize_ssa_vuse_ops (stmt);
}
/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

static inline void
cleanup_build_arrays (void)
{
  unsigned i;
  tree t;

  for (i = 0; VEC_iterate (tree, build_vdefs, i, t); i++)
    if (TREE_CODE (t) != SSA_NAME)
      var_ann (t)->in_vdef_list = false;

  for (i = 0; VEC_iterate (tree, build_vuses, i, t); i++)
    if (TREE_CODE (t) != SSA_NAME)
      var_ann (t)->in_vuse_list = false;

  VEC_truncate (tree, build_vdefs, 0);
  VEC_truncate (tree, build_vuses, 0);
  VEC_truncate (tree, build_defs, 0);
  VEC_truncate (tree, build_uses, 0);
  bitmap_clear (build_loads);
  bitmap_clear (build_stores);
}
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_vdefs (stmt);
  finalize_ssa_vuses (stmt);
  cleanup_build_arrays ();
}
/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_vdefs) == 0);
  gcc_assert (bitmap_empty_p (build_loads));
  gcc_assert (bitmap_empty_p (build_stores));
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}
/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}
/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  tree sym;

  if (TREE_CODE (var) != SSA_NAME)
    {
      tree mpt;
      var_ann_t ann;

      /* If VAR belongs to a memory partition, use it instead of VAR.  */
      mpt = memory_partition (var);
      if (mpt)
        var = mpt;

      /* Don't allow duplicate entries.  */
      ann = get_var_ann (var);
      if (ann->in_vdef_list)
        return;

      ann->in_vdef_list = true;
      sym = var;
    }
  else
    sym = SSA_NAME_VAR (var);

  VEC_safe_push (tree, heap, build_vdefs, var);
  bitmap_set_bit (build_stores, DECL_UID (sym));
}
/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  tree sym;

  if (TREE_CODE (var) != SSA_NAME)
    {
      tree mpt;
      var_ann_t ann;

      /* If VAR belongs to a memory partition, use it instead of VAR.  */
      mpt = memory_partition (var);
      if (mpt)
        var = mpt;

      /* Don't allow duplicate entries.  */
      ann = get_var_ann (var);
      if (ann->in_vuse_list || ann->in_vdef_list)
        return;

      ann->in_vuse_list = true;
      sym = var;
    }
  else
    sym = SSA_NAME_VAR (var);

  VEC_safe_push (tree, heap, build_vuses, var);
  bitmap_set_bit (build_loads, DECL_UID (sym));
}
/* REF is a tree that contains the entire pointer dereference
   expression, if available, or NULL otherwise.  ALIAS is the variable
   we are asking if REF can access.  OFFSET and SIZE come from the
   memory access expression that generated this virtual operand.  */

static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
                           HOST_WIDE_INT size)
{
  bool offsetgtz = offset > 0;
  unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
  tree base = ref ? get_base_address (ref) : NULL;

  /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
     using a call-clobbered memory tag.  By definition, call-clobbered
     memory tags can always touch .GLOBAL_VAR.  */
  if (alias == gimple_global_var (cfun))
    return true;

  /* If ALIAS is an SFT, it can't be touched if the offset
     and size of the access is not overlapping with the SFT offset and
     size.  This is only true if we are accessing through a pointer
     to a type that is the same as SFT_PARENT_VAR.  Otherwise, we may
     be accessing through a pointer to some substruct of the
     structure, and if we try to prune there, we will have the wrong
     offset, and get the wrong answer.
     i.e., we can't prune without more work if we have something like

     struct gcc_target
     {
       struct asm_out
       {
         const char *byte_op;
         struct asm_int_op
         {
           const char *hi;
         } aligned_op;
       } asm_out;
     } targetm;

     foo = &targetm.asm_out.aligned_op;
     return foo->hi;

     SFT.1, which represents hi, will have SFT_OFFSET=32 because in
     terms of SFT_PARENT_VAR, that is where it is.
     However, the access through the foo pointer will be at offset 0.  */
  if (size != -1
      && TREE_CODE (alias) == STRUCT_FIELD_TAG
      && base
      && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
      && !overlap_subvar (offset, size, alias, NULL))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* Without strict aliasing, it is impossible for a component access
     through a pointer to touch a random variable, unless that
     variable *is* a structure or a pointer.

     That is, given p->c, and some random global variable b,
     there is no legal way that p->c could be an access to b.

     Without strict aliasing on, we consider it legal to do something
     like:

     struct foos { int l; };
     int foo;
     static struct foos *getfoo(void);
     int main (void)
     {
       struct foos *f = getfoo();
       f->l = 1;
       foo = 2;
       if (f->l == 1)
         abort();
       exit(0);
     }
     static struct foos *getfoo(void)
     { return (struct foos *)&foo; }

     (taken from 20000623-1.c)

     The docs also say/imply that access through union pointers
     is legal (but *not* if you take the address of the union member,
     i.e. the inverse), such that you can do things like

     U *pretmp = (U*)&rv;

     and access the union through the pointer.  To implement this, we
     just punt on accesses through union pointers entirely.  */
  else if (ref
           && flag_strict_aliasing
           && TREE_CODE (ref) != INDIRECT_REF
           && !MTAG_P (alias)
           && (TREE_CODE (base) != INDIRECT_REF
               || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
           && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
           && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
           && !var_ann (alias)->is_heapvar
           /* When the struct has may_alias attached to it, we need not to
              return true.  */
           && get_alias_set (base))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* If the offset of the access is greater than the size of one of
     the possible aliases, it can't be touching that alias, because it
     would be past the end of the structure.  */
  else if (ref
           && flag_strict_aliasing
           && TREE_CODE (ref) != INDIRECT_REF
           && !MTAG_P (alias)
           && !POINTER_TYPE_P (TREE_TYPE (alias))
           && offsetgtz
           && DECL_SIZE (alias)
           && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
           && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  return true;
}
/* Add VAR to the virtual operands array.  FLAGS is as in
   get_expr_operands.  FULL_REF is a tree that contains the entire
   pointer dereference expression, if available, or NULL otherwise.
   OFFSET and SIZE come from the memory access expression that
   generated this virtual operand.  FOR_CLOBBER is true if this is
   adding a virtual operand for a call clobber.  */

static void
add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
                     tree full_ref, HOST_WIDE_INT offset,
                     HOST_WIDE_INT size, bool for_clobber)
{
  VEC(tree,gc) *aliases;
  tree sym;
  var_ann_t v_ann;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark the statement as having memory operands.  */
  s_ann->references_memory = true;

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a VDEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     check that this only happens on non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     GIMPLE_MODIFY_STMT, then we can't suppress the VDEF, lest we run
     into validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_implicit) && unmodifiable_var_p (var))
    flags = (flags & ~opf_def) | opf_use;

  /* The variable is not a GIMPLE register.  Add it (or its aliases) to
     virtual operands, unless the caller has specifically requested
     not to add virtual operands (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  aliases = v_ann->may_aliases;
  if (aliases == NULL)
    {
      /* The variable is not aliased or it is an alias tag.  */
      if (flags & opf_def)
        append_vdef (var);
      else
        append_vuse (var);
    }
  else
    {
      unsigned i;
      tree al;

      /* The variable is aliased.  Add its aliases to the virtual
         operands.  */
      gcc_assert (VEC_length (tree, aliases) != 0);

      if (flags & opf_def)
        {
          bool none_added = true;

          for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
            {
              if (!access_can_touch_variable (full_ref, al, offset, size))
                continue;

              none_added = false;
              append_vdef (al);
            }

          /* If the variable is also an alias tag, add a virtual
             operand for it, otherwise we will miss representing
             references to the members of the variable's alias set.
             This fixes the bug in gcc.c-torture/execute/20020503-1.c.

             It is also necessary to add bare defs on clobbers for
             SMT's, so that bare SMT uses caused by pruning all the
             aliases will link up properly with calls.  In order to
             keep the number of these bare defs we add down to the
             minimum necessary, we keep track of which SMT's were used
             alone in statement vdefs or VUSEs.  */
          if (v_ann->is_aliased
              || none_added
              || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
                  && for_clobber))
            append_vdef (var);
        }
      else
        {
          bool none_added = true;

          for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
            {
              if (!access_can_touch_variable (full_ref, al, offset, size))
                continue;
              none_added = false;
              append_vuse (al);
            }

          /* Similarly, append a virtual use for VAR itself, when
             it is an alias tag.  */
          if (v_ann->is_aliased || none_added)
            append_vuse (var);
        }
    }
}
/* Add *VAR_P to the appropriate operand array for S_ANN.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  tree var, sym;
  var_ann_t v_ann;

  gcc_assert (SSA_VAR_P (*var_p) && s_ann);

  var = *var_p;
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  */
  if (TREE_THIS_VOLATILE (sym))
    s_ann->has_volatile_ops = true;

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
        append_def (var_p);
      else
        append_use (var_p);
    }
  else
    add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
}
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
      that got us here.

   FLAGS is as in get_expr_operands.

   FULL_REF contains the full pointer dereference expression, if we
      have it, or NULL otherwise.

   OFFSET and SIZE are the location of the access inside the
      dereferenced pointer, if known.

   RECURSE_ON_BASE should be set to true if we want to continue
      calling get_expr_operands on the base pointer, and false if
      something else will do it for us.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags, tree full_ref,
                           HOST_WIDE_INT offset, HOST_WIDE_INT size,
                           bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  s_ann->references_memory = true;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
          && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
          && pi->name_mem_tag)
        {
          /* PTR has its own memory tag.  Use it.  */
          add_virtual_operand (pi->name_mem_tag, s_ann, flags,
                               full_ref, offset, size, false);
        }
      else
        {
          /* If PTR is not an SSA_NAME or it doesn't have a name
             tag, use its symbol memory tag.  */
          var_ann_t v_ann;

          /* If we are emitting debugging dumps, display a warning if
             PTR is an SSA_NAME with no flow-sensitive alias
             information.  That means that we may need to compute
             aliasing again.  */
          if (dump_file
              && TREE_CODE (ptr) == SSA_NAME
              && pi == NULL)
            {
              fprintf (dump_file,
                       "NOTE: no flow-sensitive alias info for ");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, " in ");
              print_generic_stmt (dump_file, stmt, dump_flags);
            }

          if (TREE_CODE (ptr) == SSA_NAME)
            ptr = SSA_NAME_VAR (ptr);
          v_ann = var_ann (ptr);

          if (v_ann->symbol_mem_tag)
            add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
                                 full_ref, offset, size, false);
        }
    }
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      /* If a constant is used as a pointer, we can't generate a real
         operand for it but we mark the statement volatile to prevent
         optimizations from messing things up.  */
      if (s_ann)
        s_ann->has_volatile_ops = true;
      return;
    }
  else
    {
      /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
      gcc_unreachable ();
    }

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr, opf_use);
}
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag, ref;
  HOST_WIDE_INT offset, size, maxsize;
  subvar_t svars, sv;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* This statement references memory.  */
  s_ann->references_memory = 1;

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);

  if (TMR_SYMBOL (expr))
    add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken);

  tag = TMR_TAG (expr);
  if (!tag)
    {
      /* Something weird, so ensure that we will be careful.  */
      s_ann->has_volatile_ops = true;
      return;
    }

  if (DECL_P (tag))
    {
      get_expr_operands (stmt, &tag, flags);
      return;
    }

  ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
  gcc_assert (ref != NULL_TREE);
  svars = get_subvars_for_var (ref);
  for (sv = svars; sv; sv = sv->next)
    {
      bool exact;

      if (overlap_subvar (offset, maxsize, sv->var, &exact))
        add_stmt_operand (&sv->var, s_ann, flags);
    }
}
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b, not_written_b;

  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_stmt_operand (&var, s_ann, opf_def);
      return;
    }

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */
  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;

  /* Add a VDEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var_lookup (u);
      unsigned int escape_mask = var_ann (var)->escape_mask;
      tree real_var = var;
      bool not_read;
      bool not_written;

      /* Not read and not written are computed on regular vars, not
         subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
        real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b,
                                            DECL_UID (real_var)) : false;
      not_written = not_written_b ? bitmap_bit_p (not_written_b,
                                                  DECL_UID (real_var)) : false;
      gcc_assert (!unmodifiable_var_p (var));

      clobber_stats.clobbered_vars++;

      /* See if this variable is really clobbered by this function.  */

      /* Trivial case: Things escaping only to pure/const are not
         clobbered by non-pure-const, and only read by pure/const.  */
      if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
        {
          tree call = get_call_expr_in (stmt);
          if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
            {
              add_stmt_operand (&var, s_ann, opf_use);
              clobber_stats.unescapable_clobbers_avoided++;
              continue;
            }
          else
            {
              clobber_stats.unescapable_clobbers_avoided++;
              continue;
            }
        }

      if (not_written)
        {
          clobber_stats.static_write_clobbers_avoided++;
          if (!not_read)
            add_stmt_operand (&var, s_ann, opf_use);
          else
            clobber_stats.static_read_clobbers_avoided++;
        }
      else
        add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
    }
}
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_stmt_operand (&var, s_ann, opf_use);
      return;
    }

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var (u);
      tree real_var = var;
      bool not_read;

      clobber_stats.readonly_clobbers++;

      /* Not read and not written are computed on regular vars, not
         subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
        real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
                            : false;

      if (not_read)
        {
          clobber_stats.static_readonly_clobbers_avoided++;
          continue;
        }

      add_stmt_operand (&var, s_ann, opf_use | opf_implicit);
    }
}
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);
  stmt_ann_t ann = stmt_ann (stmt);

  ann->references_memory = true;

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (gimple_aliases_computed_p (cfun)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
         A 'noreturn' function might, but since we don't return anyway
         there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
          && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
        add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
        add_call_read_ops (stmt, get_callee_fndecl (expr));
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_use);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
}
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann;
  int i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  tree link;

  s_ann = stmt_ann (stmt);
  noutputs = list_length (ASM_OUTPUTS (stmt));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                               &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t) && s_ann)
            add_to_addressable_set (t, &s_ann->addresses_taken);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
    }

  /* Gather all input operands.  */
  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
                              &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t) && s_ann)
            add_to_addressable_set (t, &s_ann->addresses_taken);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
        unsigned i;
        bitmap_iterator bi;

        s_ann->references_memory = true;

        EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
          {
            tree var = referenced_var (i);
            add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
          }

        EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
          {
            tree var = referenced_var (i);

            /* Subvars are explicitly represented in this list, so we
               don't need the original to be added to the clobber ops,
               but the original *will* be in this list because we keep
               the addressability of the original variable up-to-date
               to avoid confusing the back-end.  */
            if (var_can_have_subvars (var)
                && get_subvars_for_var (var) != NULL)
              continue;

            add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
          }
        break;
      }
}
/* Scan operands for the assignment expression EXPR in statement STMT.  */

static void
get_modify_stmt_operands (tree stmt, tree expr)
{
  /* First get operands from the RHS.  */
  get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_use);

  /* For the LHS, use a regular definition (opf_def) for GIMPLE
     registers.  If the LHS is a store to memory, we will need
     a preserving definition (VDEF).

     Preserving definitions are those that modify a part of an
     aggregate object for which no subvars have been computed (or the
     reference does not correspond exactly to one of them).  Stores
     through a pointer are also represented with VDEF operators.

     We used to distinguish between preserving and killing definitions.
     We always emit preserving definitions now.  */
  get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0), opf_def);
}
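
/* As an illustrative example, given GIMPLE registers 'a' and 'b' and a
   pointer 'p', the scan above produces:

     a = b;      -- real DEF of 'a', real USE of 'b'
     *p = b;     -- VDEF of the memory reached through 'p', real USE of 'b'  */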
2008 /* Recursively scan the expression pointed to by EXPR_P in statement
2009 STMT. FLAGS is one of the OPF_* constants modifying how to
2010 interpret the operands found. */
2013 get_expr_operands (tree stmt
, tree
*expr_p
, int flags
)
2015 enum tree_code code
;
2016 enum tree_code_class
class;
2017 tree expr
= *expr_p
;
2018 stmt_ann_t s_ann
= stmt_ann (stmt
);
2023 code
= TREE_CODE (expr
);
2024 class = TREE_CODE_CLASS (code
);
2029 /* Taking the address of a variable does not represent a
2030 reference to it, but the fact that the statement takes its
2031 address will be of interest to some passes (e.g. alias
2033 add_to_addressable_set (TREE_OPERAND (expr
, 0), &s_ann
->addresses_taken
);
2035 /* If the address is invariant, there may be no interesting
2036 variable references inside. */
2037 if (is_gimple_min_invariant (expr
))
2040 /* Otherwise, there may be variables referenced inside but there
2041 should be no VUSEs created, since the referenced objects are
2042 not really accessed. The only operands that we should find
2043 here are ARRAY_REF indices which will always be real operands
2044 (GIMPLE does not allow non-registers as array indices). */
2045 flags
|= opf_no_vops
;
2046 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
);
2050 case STRUCT_FIELD_TAG
:
2051 case SYMBOL_MEMORY_TAG
:
2052 case NAME_MEMORY_TAG
:
2053 add_stmt_operand (expr_p
, s_ann
, flags
);
2062 /* Add the subvars for a variable, if it has subvars, to DEFS
2063 or USES. Otherwise, add the variable itself. Whether it
2064 goes to USES or DEFS depends on the operand flags. */
2065 if (var_can_have_subvars (expr
)
2066 && (svars
= get_subvars_for_var (expr
)))
2069 for (sv
= svars
; sv
; sv
= sv
->next
)
2070 add_stmt_operand (&sv
->var
, s_ann
, flags
);
2073 add_stmt_operand (expr_p
, s_ann
, flags
);
2078 case MISALIGNED_INDIRECT_REF
:
2079 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), flags
);
2082 case ALIGN_INDIRECT_REF
:
2084 get_indirect_ref_operands (stmt
, expr
, flags
, NULL_TREE
, 0, -1, true);
2087 case TARGET_MEM_REF
:
2088 get_tmr_operands (stmt
, expr
, flags
);
2092 case ARRAY_RANGE_REF
:
2098 HOST_WIDE_INT offset
, size
, maxsize
;
2101 /* This component reference becomes an access to all of the
2102 subvariables it can touch, if we can determine that, but
2103 *NOT* the real one. If we can't determine which fields we
2104 could touch, the recursion will eventually get to a
2105 variable and add *all* of its subvars, or whatever is the
2106 minimum correct subset. */
2107 ref
= get_ref_base_and_extent (expr
, &offset
, &size
, &maxsize
);
2108 if (SSA_VAR_P (ref
) && get_subvars_for_var (ref
))
2111 subvar_t svars
= get_subvars_for_var (ref
);
2113 for (sv
= svars
; sv
; sv
= sv
->next
)
2117 if (overlap_subvar (offset
, maxsize
, sv
->var
, &exact
))
2119 int subvar_flags
= flags
;
2121 add_stmt_operand (&sv
->var
, s_ann
, subvar_flags
);
2126 flags
|= opf_no_vops
;
2128 else if (TREE_CODE (ref
) == INDIRECT_REF
)
2130 get_indirect_ref_operands (stmt
, ref
, flags
, expr
, offset
,
2132 flags
|= opf_no_vops
;
2135 /* Even if we found subvars above we need to ensure to see
2136 immediate uses for d in s.a[d]. In case of s.a having
2137 a subvar or we would miss it otherwise. */
2138 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
);
2140 if (code
== COMPONENT_REF
)
2142 if (s_ann
&& TREE_THIS_VOLATILE (TREE_OPERAND (expr
, 1)))
2143 s_ann
->has_volatile_ops
= true;
2144 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_use
);
2146 else if (code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
2148 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), opf_use
);
2149 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_use
);
2150 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 3), opf_use
);
2156 case WITH_SIZE_EXPR
:
2157 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
2158 and an rvalue reference to its second argument. */
2159 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), opf_use
);
2160 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
);
2164 get_call_expr_operands (stmt
, expr
);
2169 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), opf_use
);
2170 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), opf_use
);
2171 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_use
);
2174 case GIMPLE_MODIFY_STMT
:
2175 get_modify_stmt_operands (stmt
, expr
);
2180 /* General aggregate CONSTRUCTORs have been decomposed, but they
2181 are still in use as the COMPLEX_EXPR equivalent for vectors. */
2182 constructor_elt
*ce
;
2183 unsigned HOST_WIDE_INT idx
;
2186 VEC_iterate (constructor_elt
, CONSTRUCTOR_ELTS (expr
), idx
, ce
);
2188 get_expr_operands (stmt
, &ce
->value
, opf_use
);

    case BIT_FIELD_REF:
    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      return;

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
      return;

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
    case CONST_DECL:
    case OMP_PARALLEL:
    case OMP_SECTIONS:
    case OMP_FOR:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_RETURN:
    case OMP_CONTINUE:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
        goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
        goto do_binary;
      if (class == tcc_constant || class == tcc_type)
        return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
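
/* For illustration: given a statement like

       s.a[i_2] = x_3;

   the scanner enters through the GIMPLE_MODIFY_STMT case, the LHS
   reaches the ARRAY_REF/COMPONENT_REF case above (adding a DEF, or
   VDEFs for the subvars the reference may touch), and the recursion
   on operand 0 together with the opf_use fetch of the index adds the
   plain uses of i_2 and x_3.  The SSA names here are hypothetical,
   used only to sketch the traversal.  */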

/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case GIMPLE_MODIFY_STMT:
      get_modify_stmt_operands (stmt, stmt);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_use);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_use);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_use);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_use);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_use);
      break;

    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      /* These nodes contain no variable references.  */
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the
         operand pointer (which may only happen for USE operands), we
         will fail in add_stmt_operand.  This default will handle
         statements like empty statements, or CALL_EXPRs that may
         appear on the RHS of a statement or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_use);
      break;
    }
}

/* Create an operands cache for STMT.  */

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands and
     makes no memory references.  */
  ann->has_volatile_ops = false;
  ann->references_memory = false;

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  operand_build_sort_virtual (build_vuses);
  operand_build_sort_virtual (build_vdefs);
  finalize_ssa_stmt_operands (stmt);

  /* For added safety, assume that statements with volatile operands
     also reference memory.  */
  if (ann->has_volatile_ops)
    ann->references_memory = true;
}

/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->vdef_ops = NULL;
  ops->vuse_ops = NULL;
  BITMAP_FREE (ops->loads);
  BITMAP_FREE (ops->stores);
}

/* Get the operands of statement STMT.  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  timevar_push (TV_TREE_OPS);

  gcc_assert (ann->modified);
  build_ssa_operands (stmt);
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
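
/* For illustration: passes normally reach update_stmt_operands
   indirectly.  A sketch of hypothetical pass code, assuming STMT is a
   valid statement the pass has just rewritten:

     GIMPLE_STMT_OPERAND (stmt, 1) = new_rhs;
     update_stmt (stmt);

   update_stmt marks the statement modified and then calls
   update_stmt_operands to rebuild the operand cache.  */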

/* Copies virtual operands from SRC to DEST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  unsigned int i, n;
  voptype_p src_vuses, dest_vuses;
  voptype_p src_vdefs, dest_vdefs;
  struct voptype_d vuse;
  struct voptype_d vdef;
  stmt_ann_t dest_ann;

  VDEF_OPS (dest) = NULL;
  VUSE_OPS (dest) = NULL;

  dest_ann = get_stmt_ann (dest);
  BITMAP_FREE (dest_ann->operands.loads);
  BITMAP_FREE (dest_ann->operands.stores);

  if (LOADED_SYMS (src))
    {
      dest_ann->operands.loads = BITMAP_ALLOC (NULL);
      bitmap_copy (dest_ann->operands.loads, LOADED_SYMS (src));
    }

  if (STORED_SYMS (src))
    {
      dest_ann->operands.stores = BITMAP_ALLOC (NULL);
      bitmap_copy (dest_ann->operands.stores, STORED_SYMS (src));
    }

  /* Copy all the VUSE operators and corresponding operands.  */
  dest_vuses = &vuse;
  for (src_vuses = VUSE_OPS (src); src_vuses; src_vuses = src_vuses->next)
    {
      n = VUSE_NUM (src_vuses);
      dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses);
      for (i = 0; i < n; i++)
        SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));

      if (VUSE_OPS (dest) == NULL)
        VUSE_OPS (dest) = vuse.next;
    }

  /* Copy all the VDEF operators and corresponding operands.  */
  dest_vdefs = &vdef;
  for (src_vdefs = VDEF_OPS (src); src_vdefs; src_vdefs = src_vdefs->next)
    {
      n = VUSE_NUM (src_vdefs);
      dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs);
      VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs);
      for (i = 0; i < n; i++)
        SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));

      if (VDEF_OPS (dest) == NULL)
        VDEF_OPS (dest) = vdef.next;
    }
}
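
/* For illustration: a sketch of a hypothetical caller that duplicates
   a statement with identical memory behavior and reuses its virtual
   operands instead of re-scanning:

     tree copy = unshare_expr (stmt);
     set_bb_for_stmt (copy, bb);
     copy_virtual_operands (copy, stmt);
*/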

/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this
   can be used to eliminate redundant loads.  OLD_STMT is the store stmt,
   and NEW_STMT is the new load which represents a load of the values
   stored.  */

void
create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt)
{
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned i;

  get_stmt_ann (new_stmt);

  /* Process NEW_STMT looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
    if (TREE_CODE (op) != SSA_NAME)
      var_ann (op)->in_vuse_list = false;

  for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
    if (TREE_CODE (op) != SSA_NAME)
      var_ann (op)->in_vdef_list = false;

  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_vdefs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the VDEF result operand on the new statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
    append_vuse (op);

  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}
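
/* For illustration: given the store 'a = x_3' with a VDEF of a_5, DOM
   can build the artificial load 'x_3 = a', whose VUSE becomes a_5 via
   the loop above.  Entering that fake statement into the available
   expression tables lets a later load of 'a' be recognized as
   redundant.  The SSA names here are hypothetical.  */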

/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */

void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;

  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;

      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp0)
          {
            use0 = ptr;
            break;
          }

      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp1)
          {
            use1 = ptr;
            break;
          }

      /* If both uses don't have operand entries, there isn't much we can do
         at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
        {
          tree *tmp = USE_OP_PTR (use1)->use;
          USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
          USE_OP_PTR (use0)->use = tmp;
        }
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
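
/* For illustration: a sketch of the typical caller, canonicalizing the
   operand order of a commutative RHS (RHS is assumed to be operand 1
   of a GIMPLE_MODIFY_STMT):

     if (commutative_tree_code (TREE_CODE (rhs))
         && tree_swap_operands_p (TREE_OPERAND (rhs, 0),
                                  TREE_OPERAND (rhs, 1), true))
       swap_tree_operands (stmt,
                           &TREE_OPERAND (rhs, 0),
                           &TREE_OPERAND (rhs, 1));
*/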

/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
   a single variable whose address has been taken or any other valid
   GIMPLE memory reference (structure reference, array, etc).  If the
   base address of REF is a decl that has sub-variables, also add all
   of its sub-variables.  */

void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
  tree var;
  subvar_t svars;

  gcc_assert (addresses_taken);

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*addresses_taken == NULL)
        *addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
          && (svars = get_subvars_for_var (var)))
        {
          subvar_t sv;
          for (sv = svars; sv; sv = sv->next)
            {
              bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
              TREE_ADDRESSABLE (sv->var) = 1;
            }
        }
      else
        {
          bitmap_set_bit (*addresses_taken, DECL_UID (var));
          TREE_ADDRESSABLE (var) = 1;
        }
    }
}
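
/* For illustration: for the address expression '&s.f' the caller
   passes the reference 's.f'; get_base_address returns 's', so it is
   's' (and all of its sub-variables, if any) that lands in the set
   and is marked TREE_ADDRESSABLE, per the PR 21407 rule above.  */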

/* Scan the immediate_use list for VAR, making sure it is linked properly.
   Return TRUE if there is a problem, and emit an error message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
         problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
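
/* For illustration: a sketch of how a debugging session might check a
   suspect SSA name directly (I is assumed to be a valid SSA version):

     tree name = ssa_name (i);
     if (verify_imm_links (stderr, name))
       debug_immediate_uses_for (name);
*/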

/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
        print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS|TDF_MEMSYMS);
      else
        print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }
  fprintf (file, "\n");
}

/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

/* Create a new change buffer for the statement pointed to by STMT_P and
   push the buffer onto SCB_STACK.  Each change buffer records state
   information needed to determine what changed in the statement.  Mainly,
   this keeps track of symbols that may need to be put into SSA form, SSA
   name replacements and other information needed to keep the SSA form up
   to date.  */

void
push_stmt_changes (tree *stmt_p)
{
  tree stmt;
  scb_t buf;

  stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  buf = xmalloc (sizeof *buf);
  memset (buf, 0, sizeof *buf);

  buf->stmt_p = stmt_p;

  if (stmt_references_memory_p (stmt))
    {
      tree op;
      ssa_op_iter i;

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
        {
          tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
          if (buf->loads == NULL)
            buf->loads = BITMAP_ALLOC (NULL);
          bitmap_set_bit (buf->loads, DECL_UID (sym));
        }

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
        {
          tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
          if (buf->stores == NULL)
            buf->stores = BITMAP_ALLOC (NULL);
          bitmap_set_bit (buf->stores, DECL_UID (sym));
        }
    }

  VEC_safe_push (scb_t, heap, scb_stack, buf);
}
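
/* For illustration: the expected protocol around the change buffers,
   as a sketch of hypothetical pass code:

     push_stmt_changes (&stmt);
     if (fold_stmt (&stmt))
       pop_stmt_changes (&stmt);       <-- act on what changed
     else
       discard_stmt_changes (&stmt);   <-- untouched, skip the re-scan

   pop_stmt_changes and discard_stmt_changes are defined below.  */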

/* Given two sets S1 and S2, mark the symbols that differ in S1 and S2
   for renaming.  The set to mark for renaming is (S1 & ~S2) | (S2 & ~S1).  */

static void
mark_difference_for_renaming (bitmap s1, bitmap s2)
{
  if (s1 == NULL && s2 == NULL)
    return;

  if (s1 && s2 == NULL)
    mark_set_for_renaming (s1);
  else if (s1 == NULL && s2)
    mark_set_for_renaming (s2);
  else if (!bitmap_equal_p (s1, s2))
    {
      bitmap t1 = BITMAP_ALLOC (NULL);
      bitmap t2 = BITMAP_ALLOC (NULL);

      bitmap_and_compl (t1, s1, s2);
      bitmap_and_compl (t2, s2, s1);
      bitmap_ior_into (t1, t2);
      mark_set_for_renaming (t1);

      BITMAP_FREE (t1);
      BITMAP_FREE (t2);
    }
}
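
/* For illustration: if S1 = {a, b} and S2 = {b, c}, then
   (S1 & ~S2) | (S2 & ~S1) = {a, c}.  Only 'a' and 'c' are marked for
   renaming; 'b' appears in both sets, so its SSA web is unchanged.  */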

/* Pop the top SCB from SCB_STACK and act on the differences between
   what was recorded by push_stmt_changes and the current state of
   the statement.  */

void
pop_stmt_changes (tree *stmt_p)
{
  tree op, stmt;
  ssa_op_iter iter;
  bitmap loads, stores;
  scb_t buf;

  stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  buf = VEC_pop (scb_t, scb_stack);
  gcc_assert (stmt_p == buf->stmt_p);

  /* Force an operand re-scan on the statement and mark any newly
     exposed variables.  */
  update_stmt (stmt);

  /* Determine whether any memory symbols need to be renamed.  If the
     sets of loads and stores are different after the statement is
     modified, then the affected symbols need to be renamed.

     Note that it may be possible for the statement to not reference
     memory anymore, but we still need to act on the differences in
     the sets of symbols.  */
  loads = stores = NULL;
  if (stmt_references_memory_p (stmt))
    {
      tree op;
      ssa_op_iter i;

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
        {
          tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
          if (loads == NULL)
            loads = BITMAP_ALLOC (NULL);
          bitmap_set_bit (loads, DECL_UID (sym));
        }

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
        {
          tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
          if (stores == NULL)
            stores = BITMAP_ALLOC (NULL);
          bitmap_set_bit (stores, DECL_UID (sym));
        }
    }

  /* If LOADS is different from BUF->LOADS, the affected
     symbols need to be marked for renaming.  */
  mark_difference_for_renaming (loads, buf->loads);

  /* Similarly for STORES and BUF->STORES.  */
  mark_difference_for_renaming (stores, buf->stores);

  /* Mark all the naked GIMPLE register operands for renaming.  */
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
    if (DECL_P (op))
      mark_sym_for_renaming (op);

  /* FIXME, need to add more finalizers here.  Cleanup EH info,
     recompute invariants for address expressions, add
     SSA replacement mappings, etc.  For instance, given
     testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of
     the form:

          # SMT.4_20 = VDEF <SMT.4_16>
          D.1576_11 = 1.0e+0;

     So, the VDEF will disappear, but instead of marking SMT.4 for
     renaming it would be far more efficient to establish a
     replacement mapping that would replace every reference of
     SMT.4_20 with SMT.4_16.  */

  /* Free memory used by the buffer.  */
  BITMAP_FREE (buf->loads);
  BITMAP_FREE (buf->stores);
  BITMAP_FREE (loads);
  BITMAP_FREE (stores);
  free (buf);
}

/* Discard the topmost change buffer from SCB_STACK.  This is useful
   when the caller realizes that it did not actually modify the
   statement.  It avoids the expensive operand re-scan.  */

void
discard_stmt_changes (tree *stmt_p)
{
  scb_t buf;
  tree stmt;

  /* It makes no sense to keep track of PHI nodes.  */
  stmt = *stmt_p;
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  buf = VEC_pop (scb_t, scb_stack);
  gcc_assert (stmt_p == buf->stmt_p);

  /* Free memory used by the buffer.  */
  BITMAP_FREE (buf->loads);
  BITMAP_FREE (buf->stores);
  free (buf);
}

/* Returns true if statement STMT may access memory.  */

bool
stmt_references_memory_p (tree stmt)
{
  if (!gimple_ssa_operands (cfun)->ops_active || TREE_CODE (stmt) == PHI_NODE)
    return false;

  return stmt_ann (stmt)->references_memory;
}

/* Return the memory partition tag (MPT) associated with memory
   symbol SYM.  From a correctness standpoint, memory partitions can
   be assigned in any arbitrary fashion as long as this rule is
   observed: Given two memory partitions MPT.i and MPT.j, they must
   not contain symbols in common.

   Memory partitions are used when putting the program into Memory-SSA
   form.  In particular, in Memory-SSA PHI nodes are not computed for
   individual memory symbols.  They are computed for memory
   partitions.  This reduces the amount of PHI nodes in the SSA graph
   at the expense of precision (i.e., it makes unrelated stores affect
   each other).

   However, it is possible to increase precision by changing this
   partitioning scheme.  For instance, if the partitioning scheme is
   such that get_mpt_for is the identity function (that is,
   get_mpt_for (s) = s), this will result in ultimate precision at the
   expense of huge SSA webs.

   At the other extreme, a partitioning scheme that groups all the
   symbols in the same set results in minimal SSA webs and almost
   total loss of precision.  */

tree
get_mpt_for (tree sym)
{
  tree mpt;

  /* Don't create a new tag unnecessarily.  */
  mpt = memory_partition (sym);
  if (mpt == NULL_TREE)
    {
      mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
      TREE_ADDRESSABLE (mpt) = 0;
      MTAG_GLOBAL (mpt) = 1;
      add_referenced_var (mpt);
      VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
      MPT_SYMBOLS (mpt) = BITMAP_ALLOC (NULL);
      set_memory_partition (sym, mpt);
    }

  return mpt;
}
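
/* For illustration: if the partitioning maps both 'a' and 'b' to the
   same tag MPT.5, then (with hypothetical SSA versions)

       a = ...;       # MPT.5_3 = VDEF <MPT.5_2>
       x_4 = b;       # VUSE <MPT.5_3>

   the load of 'b' conservatively appears to depend on the store to
   'a'.  Finer partitions recover precision at the cost of more
   virtual PHI nodes.  */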

/* Dump memory partition information to FILE.  */

static void
dump_memory_partitions (FILE *file)
{
  unsigned i, npart;
  unsigned long nsyms;
  tree mpt;

  fprintf (file, "\nMemory partitions\n\n");
  for (i = 0, npart = 0, nsyms = 0;
       VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
       i++)
    {
      if (mpt)
        {
          bitmap syms = MPT_SYMBOLS (mpt);
          unsigned long n = bitmap_count_bits (syms);

          fprintf (file, "#%u: ", i);
          print_generic_expr (file, mpt, 0);
          fprintf (file, ": %lu elements: ", n);
          dump_decl_set (file, syms);
          npart++;
          nsyms += n;
        }
    }

  fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
}


/* Dump memory partition information to stderr.  */

void
debug_memory_partitions (void)
{
  dump_memory_partitions (stderr);
}