/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
#include "coretypes.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "ipa-reference.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
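
/* An illustrative sketch (not code from this file): once the cache has
   been rebuilt, a pass typically walks the real operands with the
   iterators from tree-ssa-operands.h, e.g.

     tree op;
     ssa_op_iter iter;
     FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
       ...;

   Virtual operands (VUSE/VDEF) live only in the cache, so rewriting
   them never touches the statement tree itself.  */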
/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use		0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def		(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of GIMPLE_MODIFY_STMT from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit	(1 << 2)
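
/* As an illustration of how these flags combine (mirroring the ADDR_EXPR
   handling in get_expr_operands below): when scanning &a.b[i], the walker
   adds opf_no_vops before recursing, so the index 'i' still becomes a
   real USE while no VUSEs are created for 'a' or 'b':

     flags |= opf_no_vops;
     get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);  */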
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Set for building all the VDEF operands.  */
static VEC(tree,heap) *build_vdefs;

/* Set for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;

/* Bitmap obstack for our datastructures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

/* Set for building all the loaded symbols.  */
static bitmap build_loads;

/* Set for building all the stored symbols.  */
static bitmap build_stores;
static void get_expr_operands (tree, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;
/* Statement change buffer.  Data structure used to record state
   information for statements.  This is used to determine what needs
   to be done in order to update the SSA web after a statement is
   modified by a pass.  If STMT is a statement that has just been
   created, or needs to be folded via fold_stmt, or anything that
   changes its physical structure then the pass should:

   1- Call push_stmt_changes (&stmt) to record the current state of
      STMT before any modifications are made.

   2- Make all appropriate modifications to the statement.

   3- Call pop_stmt_changes (&stmt) to find new symbols that
      need to be put in SSA form, SSA name mappings for names that
      have disappeared, recompute invariantness for address
      expressions, cleanup EH information, etc.

   If it is possible to determine that the statement was not modified,
   instead of calling pop_stmt_changes it is quicker to call
   discard_stmt_changes to avoid the expensive and unnecessary operand
   re-scan and change comparison.  */
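
/* A minimal usage sketch of the protocol above (STMT_CHANGED is a
   hypothetical flag a pass would compute; the calls are the real API
   described in this comment):

     push_stmt_changes (&stmt);
     ... rewrite or fold the statement ...
     if (stmt_changed)
       pop_stmt_changes (&stmt);
     else
       discard_stmt_changes (&stmt);  */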
struct scb_d
{
  /* Pointer to the statement being modified.  */
  tree *stmt_p;

  /* If the statement references memory these are the sets of symbols
     loaded and stored by the statement.  */
  bitmap loads;
  bitmap stores;
};

typedef struct scb_d *scb_t;

DEF_VEC_P(scb_t);
DEF_VEC_ALLOC_P(scb_t,heap);
/* Stack of statement change buffers (SCB).  Every call to
   push_stmt_changes pushes a new buffer onto the stack.  Calls to
   pop_stmt_changes pop a buffer off of the stack and compute the set
   of changes for the popped statement.  */
static VEC(scb_t,heap) *scb_stack;
/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}
/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *)p);
  tree e2 = *((const tree *)q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}
/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static inline void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);

  if (num < 2)
    return;

  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
	  > get_name_decl (VEC_index (tree, list, 1)))
	{
	  /* Swap elements if in the wrong order.  */
	  tree tmp = VEC_index (tree, list, 0);
	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
	  VEC_replace (tree, list, 1, tmp);
	}
      return;
    }

  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
	 VEC_length (tree, list),
	 sizeof (tree),
	 operand_build_cmp);
}
/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}
/* VOPs are of variable size, so the free list maps "free buckets" to the
   number of operands a vop can hold: buckets 0 through 15 serve vops of
   exactly 1 to 16 operands, and each higher bucket serves an 8-operand
   range (17-24, 25-32, and so on).
   Any VOPs larger than this are simply added to the largest bucket when they
   are freed.  */
/* Return the number of operands used in bucket BUCKET.  */

static inline int
vop_free_bucket_size (int bucket)
{
#ifdef ENABLE_CHECKING
  gcc_assert (bucket >= 0 && bucket < NUM_VOP_FREE_BUCKETS);
#endif
  if (bucket < 16)
    return bucket + 1;
  return (bucket - 13) * 8;
}
/* For a vop of NUM operands, return the bucket NUM belongs to.  If NUM is
   beyond the end of the bucket table, return -1.  */

static inline int
vop_free_bucket_index (int num)
{
  gcc_assert (num > 0 && NUM_VOP_FREE_BUCKETS > 16);

  /* Sizes 1 through 16 use buckets 0-15.  */
  if (num <= 16)
    return num - 1;
  /* Buckets 16 - NUM_VOP_FREE_BUCKETS represent 8 unit chunks.  */
  num = 14 + (num - 1) / 8;
  if (num >= NUM_VOP_FREE_BUCKETS)
    return -1;
  else
    return num;
}
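
/* Worked example of the two mappings above: a vop with 17 operands maps
   to bucket 14 + (17 - 1) / 8 = 16, and vop_free_bucket_size (16)
   returns (16 - 13) * 8 = 24, so bucket 16 serves requests of 17 to 24
   operands.  */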
/* Initialize the VOP free buckets.  */

static inline void
init_vop_buckets (void)
{
  int x;

  for (x = 0; x < NUM_VOP_FREE_BUCKETS; x++)
    gimple_ssa_operands (cfun)->vop_free_buckets[x] = NULL;
}
/* Add PTR to the appropriate VOP bucket.  */

static inline void
add_vop_to_freelist (voptype_p ptr)
{
  int bucket = vop_free_bucket_index (VUSE_VECT_NUM_ELEM (ptr->usev));

  /* Too large, use the largest bucket so it's not a complete throw away.  */
  if (bucket == -1)
    bucket = NUM_VOP_FREE_BUCKETS - 1;

  ptr->next = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
  gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = ptr;
}
/* These are the sizes of the operand memory buffer which gets allocated each
   time more operands space is required.  The final value is the amount that is
   allocated every time after that.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_2	110
#define OP_SIZE_3	511
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuses = VEC_alloc (tree, heap, 25);
      build_vdefs = VEC_alloc (tree, heap, 25);
      bitmap_obstack_initialize (&operands_bitmap_obstack);
      build_loads = BITMAP_ALLOC (&operands_bitmap_obstack);
      build_stores = BITMAP_ALLOC (&operands_bitmap_obstack);
      scb_stack = VEC_alloc (scb_t, heap, 20);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index
     = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  init_vop_buckets ();
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
}
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  unsigned ix;
  tree mpt;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      VEC_free (tree, heap, build_vdefs);
      VEC_free (tree, heap, build_vuses);
      BITMAP_FREE (build_loads);
      BITMAP_FREE (build_stores);

      /* The change buffer stack had better be empty.  */
      gcc_assert (VEC_length (scb_t, scb_stack) == 0);
      VEC_free (scb_t, heap, scb_stack);
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
	= gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  for (ix = 0;
       VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt);
       ix++)
    {
      if (mpt)
	BITMAP_FREE (MPT_SYMBOLS (mpt));
    }

  VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table);

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);
  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars: %d\n",
	       clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided: %d\n",
	       clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided: %d\n",
	       clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
	       clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers: %d\n",
	       clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
	       clobber_stats.static_readonly_clobbers_avoided);
    }
}
/* Return memory for operands of SIZE chunks.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      if (gimple_ssa_operands (cfun)->ssa_operand_mem_size == OP_SIZE_INIT)
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_1 * sizeof (struct voptype_d);
      else if (gimple_ssa_operands (cfun)->ssa_operand_mem_size
	       == OP_SIZE_1 * sizeof (struct voptype_d))
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_2 * sizeof (struct voptype_d);
      else
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_3 * sizeof (struct voptype_d);

      /* Go right to the maximum size if the request is too large.  */
      if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size)
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_3 * sizeof (struct voptype_d);

      /* Fail if there is not enough space.  If there are this many operands
	 required, first make sure there isn't a different problem causing this
	 many operands.  If the decision is that this is OK, then we can
	 specially allocate a buffer just for this request.  */
      gcc_assert (size <= gimple_ssa_operands (cfun)->ssa_operand_mem_size);

      ptr = (struct ssa_operand_memory_d *)
	    ggc_alloc (sizeof (struct ssa_operand_memory_d)
		       + gimple_ssa_operands (cfun)->ssa_operand_mem_size - 1);
      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
	  ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}
/* Allocate a DEF operand.  */

static inline struct def_optype_d *
alloc_def (void)
{
  struct def_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_defs)
    {
      ret = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs
	= gimple_ssa_operands (cfun)->free_defs->next;
    }
  else
    ret = (struct def_optype_d *)
	  ssa_operand_alloc (sizeof (struct def_optype_d));
  return ret;
}
/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
	= gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
	  ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}
/* Allocate a vop with NUM elements.  */

static inline struct voptype_d *
alloc_vop (int num)
{
  struct voptype_d *ret = NULL;
  int alloc_size = 0;

  int bucket = vop_free_bucket_index (num);
  if (bucket != -1)
    {
      /* If there is a free operand, use it.  */
      if (gimple_ssa_operands (cfun)->vop_free_buckets[bucket] != NULL)
	{
	  ret = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
	  gimple_ssa_operands (cfun)->vop_free_buckets[bucket] =
	    gimple_ssa_operands (cfun)->vop_free_buckets[bucket]->next;
	}
      else
	alloc_size = vop_free_bucket_size(bucket);
    }
  else
    alloc_size = num;

  if (alloc_size > 0)
    ret = (struct voptype_d *)ssa_operand_alloc (
	sizeof (struct voptype_d) + (alloc_size - 1) * sizeof (vuse_element_t));

  VUSE_VECT_NUM_ELEM (ret->usev) = num;
  return ret;
}
/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* fold_stmt may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}
/* Adds OP to the list of defs after LAST.  */

static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
  def_optype_p new;

  new = alloc_def ();
  DEF_OP_PTR (new) = op;
  last->next = new;
  new->next = NULL;
  return new;
}
/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (tree stmt, tree *op, use_optype_p last)
{
  use_optype_p new;

  new = alloc_use ();
  USE_OP_PTR (new)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new), *op, stmt);
  last->next = new;
  new->next = NULL;
  return new;
}
/* Return a virtual op pointer with NUM elements which are all initialized to OP
   and are linked into the immediate uses for STMT.  The new vop is appended
   after PREV.  */

static inline voptype_p
add_vop (tree stmt, tree op, int num, voptype_p prev)
{
  voptype_p new;
  int x;

  new = alloc_vop (num);
  for (x = 0; x < num; x++)
    {
      VUSE_OP_PTR (new, x)->prev = NULL;
      SET_VUSE_OP (new, x, op);
      VUSE_OP_PTR (new, x)->use = &new->usev.uses[x].use_var;
      link_imm_use_stmt (VUSE_OP_PTR (new, x), new->usev.uses[x].use_var, stmt);
    }

  if (prev)
    prev->next = new;
  new->next = NULL;
  return new;
}
/* Adds OP to the list of vuses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline voptype_p
add_vuse_op (tree stmt, tree op, int num, voptype_p last)
{
  voptype_p new = add_vop (stmt, op, num, last);
  VDEF_RESULT (new) = NULL_TREE;
  return new;
}
/* Adds OP to the list of vdefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline voptype_p
add_vdef_op (tree stmt, tree op, int num, voptype_p last)
{
  voptype_p new = add_vop (stmt, op, num, last);
  VDEF_RESULT (new) = op;
  return new;
}
/* Reallocate the virtual operand PTR so that it has NUM_ELEM use slots.  ROOT
   is the head of the operand list it belongs to.  */

static inline struct voptype_d *
realloc_vop (struct voptype_d *ptr, unsigned int num_elem,
	     struct voptype_d **root)
{
  unsigned int x, lim;
  tree val, stmt;
  struct voptype_d *ret, *tmp;

  if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
    return ptr;

  val = VUSE_OP (ptr, 0);
  if (TREE_CODE (val) == SSA_NAME)
    val = SSA_NAME_VAR (val);

  stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));

  /* Delink all the existing uses.  */
  for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
    {
      use_operand_p use_p = VUSE_OP_PTR (ptr, x);
      delink_imm_use (use_p);
    }

  /* If we want less space, simply use this one, and shrink the size.  */
  if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem)
    {
      VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem;
      return ptr;
    }

  /* It is growing.  Allocate a new one and replace the old one.  */
  ret = add_vuse_op (stmt, val, num_elem, ptr);

  /* Clear PTR and add its memory to the free list.  */
  lim = VUSE_VECT_NUM_ELEM (ptr->usev);
  memset (ptr, 0,
	  sizeof (struct voptype_d) + sizeof (vuse_element_t) * (lim - 1));
  add_vop_to_freelist (ptr);

  /* Now simply remove the old one.  */
  if (*root == ptr)
    {
      *root = ret;
      return ret;
    }
  else
    for (tmp = *root;
	 tmp != NULL && tmp->next != ptr;
	 tmp = tmp->next)
      ;

  /* The pointer passed in isn't in STMT's VDEF lists.  */
  gcc_assert (tmp != NULL);

  tmp->next = ret;
  return ret;
}
/* Reallocate the PTR vdef so that it has NUM_ELEM use slots.  */

struct voptype_d *
realloc_vdef (struct voptype_d *ptr, unsigned int num_elem)
{
  tree val, stmt;
  struct voptype_d *ret;

  val = VDEF_RESULT (ptr);
  stmt = USE_STMT (VDEF_OP_PTR (ptr, 0));
  ret = realloc_vop (ptr, num_elem, &(VDEF_OPS (stmt)));
  VDEF_RESULT (ret) = val;
  return ret;
}
/* Reallocate the PTR vuse so that it has NUM_ELEM use slots.  */

struct voptype_d *
realloc_vuse (struct voptype_d *ptr, unsigned int num_elem)
{
  tree stmt;
  struct voptype_d *ret;

  stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
  ret = realloc_vop (ptr, num_elem, &(VUSE_OPS (stmt)));
  return ret;
}
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (tree stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);

  new_list.next = NULL;
  last = &new_list;

  old_ops = DEF_OPS (stmt);

  new_i = 0;

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  DEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    def_optype_p ptr;
    unsigned x = 0;
    for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == num);
  }
#endif
}
/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (tree stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to update_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
  }
#endif

  new_list.next = NULL;
  last = &new_list;

  old_ops = USE_OPS (stmt);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
		       (tree *) VEC_index (tree, build_uses, new_i),
		       last);

  /* Now set the stmt's operands.  */
  USE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_uses));
  }
#endif
}
/* Takes elements from BUILD_VDEFS and turns them into vdef operands of
   STMT.  FIXME, for now VDEF operators should have a single operand
   in their RHS.  */

static inline void
finalize_ssa_vdefs (tree stmt)
{
  unsigned new_i;
  struct voptype_d new_list;
  voptype_p old_ops, ptr, last;
  stmt_ann_t ann = stmt_ann (stmt);

  /* Set the symbols referenced by STMT.  */
  if (!bitmap_empty_p (build_stores))
    {
      if (ann->operands.stores == NULL)
	ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);

      bitmap_copy (ann->operands.stores, build_stores);
    }
  else
    BITMAP_FREE (ann->operands.stores);

  /* If aliases have not been computed, do not instantiate a virtual
     operator on STMT.  Initially, we only compute the SSA form on
     GIMPLE registers.  The virtual SSA form is only computed after
     alias analysis, so virtual operators will remain unrenamed and
     the verifier will complain.  However, alias analysis needs to
     access symbol load/store information, so we need to compute
     those.  */
  if (!gimple_aliases_computed_p (cfun))
    return;

  new_list.next = NULL;
  last = &new_list;

  old_ops = VDEF_OPS (stmt);
  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_vdefs))
    {
      tree op = VEC_index (tree, build_vdefs, new_i);
      unsigned new_uid = get_name_decl (op);
      unsigned old_uid = get_name_decl (VDEF_RESULT (old_ops));

      /* FIXME, for now each VDEF operator should have at most one
	 operand in their RHS.  */
      gcc_assert (VDEF_NUM (old_ops) == 1);

      if (old_uid == new_uid)
	{
	  /* If the symbols are the same, reuse the existing operand.  */
	  last->next = old_ops;
	  last = old_ops;
	  old_ops = old_ops->next;
	  last->next = NULL;
	  set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt);
	  new_i++;
	}
      else if (old_uid < new_uid)
	{
	  /* If old is less than new, old goes to the free list.  */
	  voptype_p next;
	  delink_imm_use (VDEF_OP_PTR (old_ops, 0));
	  next = old_ops->next;
	  add_vop_to_freelist (old_ops);
	  old_ops = next;
	}
      else
	{
	  /* This is a new operand.  */
	  last = add_vdef_op (stmt, op, 1, last);
	  new_i++;
	}
    }

  /* If there is anything remaining in BUILD_VDEFS, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
    last = add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, last);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = last)
	{
	  last = ptr->next;
	  delink_imm_use (VDEF_OP_PTR (ptr, 0));
	  add_vop_to_freelist (ptr);
	}
    }

  /* Now set STMT's operands.  */
  VDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = VDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_vdefs));
  }
#endif
}
/* Takes elements from BUILD_VUSES and turns them into VUSE operands of
   STMT.  */

static inline void
finalize_ssa_vuse_ops (tree stmt)
{
  unsigned new_i, old_i;
  voptype_p old_ops, last;
  VEC(tree,heap) *new_ops;
  stmt_ann_t ann;

  /* Set the symbols referenced by STMT.  */
  ann = stmt_ann (stmt);
  if (!bitmap_empty_p (build_loads))
    {
      if (ann->operands.loads == NULL)
	ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);

      bitmap_copy (ann->operands.loads, build_loads);
    }
  else
    BITMAP_FREE (ann->operands.loads);

  /* If aliases have not been computed, do not instantiate a virtual
     operator on STMT.  Initially, we only compute the SSA form on
     GIMPLE registers.  The virtual SSA form is only computed after
     alias analysis, so virtual operators will remain unrenamed and
     the verifier will complain.  However, alias analysis needs to
     access symbol load/store information, so we need to compute
     those.  */
  if (!gimple_aliases_computed_p (cfun))
    return;

  /* STMT should have at most one VUSE operator.  */
  old_ops = VUSE_OPS (stmt);
  gcc_assert (old_ops == NULL || old_ops->next == NULL);

  new_ops = NULL;
  new_i = old_i = 0;
  while (old_ops
	 && old_i < VUSE_NUM (old_ops)
	 && new_i < VEC_length (tree, build_vuses))
    {
      tree new_op = VEC_index (tree, build_vuses, new_i);
      tree old_op = VUSE_OP (old_ops, old_i);
      unsigned new_uid = get_name_decl (new_op);
      unsigned old_uid = get_name_decl (old_op);

      if (old_uid == new_uid)
	{
	  /* If the symbols are the same, reuse the existing operand.  */
	  VEC_safe_push (tree, heap, new_ops, old_op);
	  new_i++;
	  old_i++;
	}
      else if (old_uid < new_uid)
	{
	  /* If OLD_UID is less than NEW_UID, the old operand has
	     disappeared, skip to the next old operand.  */
	  old_i++;
	}
      else
	{
	  /* This is a new operand.  */
	  VEC_safe_push (tree, heap, new_ops, new_op);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_vuses list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
    VEC_safe_push (tree, heap, new_ops, VEC_index (tree, build_vuses, new_i));

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
	delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
      add_vop_to_freelist (old_ops);
      VUSE_OPS (stmt) = NULL;
    }

  /* If there are any operands, instantiate a VUSE operator for STMT.  */
  if (new_ops)
    {
      tree op;
      unsigned i;

      last = add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), NULL);

      for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
	SET_USE (VUSE_OP_PTR (last, (int) i), op);

      VUSE_OPS (stmt) = last;
      VEC_free (tree, heap, new_ops);
    }

#ifdef ENABLE_CHECKING
  {
    unsigned x;

    if (VUSE_OPS (stmt))
      {
	gcc_assert (VUSE_OPS (stmt)->next == NULL);
	x = VUSE_NUM (VUSE_OPS (stmt));
      }
    else
      x = 0;

    gcc_assert (x == VEC_length (tree, build_vuses));
  }
#endif
}
/* Return a new VUSE operand vector for STMT.  */

static inline void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_vdefs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     VDEF operator for a variable 'a', then a VUSE for 'a' is not
     needed because VDEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is pointed-to by p and q:

	      # VUSE <a_2>
	      # a_3 = VDEF <a_2>
	      *p = *q;

     The VUSE <a_2> is superfluous because it is implied by the
     VDEF operator.  */
  num = VEC_length (tree, build_vuses);
  num_vdefs = VEC_length (tree, build_vdefs);

  if (num > 0 && num_vdefs > 0)
    for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
      {
	tree vuse;
	vuse = VEC_index (tree, build_vuses, vuse_index);
	if (TREE_CODE (vuse) != SSA_NAME)
	  {
	    var_ann_t ann = var_ann (vuse);
	    ann->in_vuse_list = 0;
	    if (ann->in_vdef_list)
	      {
		VEC_ordered_remove (tree, build_vuses, vuse_index);
		continue;
	      }
	  }
	vuse_index++;
      }

  finalize_ssa_vuse_ops (stmt);
}
/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

static inline void
cleanup_build_arrays (void)
{
  unsigned i;
  tree t;

  for (i = 0; VEC_iterate (tree, build_vdefs, i, t); i++)
    if (TREE_CODE (t) != SSA_NAME)
      var_ann (t)->in_vdef_list = false;

  for (i = 0; VEC_iterate (tree, build_vuses, i, t); i++)
    if (TREE_CODE (t) != SSA_NAME)
      var_ann (t)->in_vuse_list = false;

  VEC_truncate (tree, build_vdefs, 0);
  VEC_truncate (tree, build_vuses, 0);
  VEC_truncate (tree, build_defs, 0);
  VEC_truncate (tree, build_uses, 0);
  bitmap_clear (build_loads);
  bitmap_clear (build_stores);
}
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_vdefs (stmt);
  finalize_ssa_vuses (stmt);
  cleanup_build_arrays ();
}
/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_vdefs) == 0);
  gcc_assert (bitmap_empty_p (build_loads));
  gcc_assert (bitmap_empty_p (build_stores));
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}
/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}
/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  tree sym;

  if (TREE_CODE (var) != SSA_NAME)
    {
      tree mpt;
      var_ann_t ann;

      /* If VAR belongs to a memory partition, use it instead of VAR.  */
      mpt = memory_partition (var);
      if (mpt)
	var = mpt;

      /* Don't allow duplicate entries.  */
      ann = get_var_ann (var);
      if (ann->in_vdef_list)
	return;

      ann->in_vdef_list = true;
      sym = var;
    }
  else
    sym = SSA_NAME_VAR (var);

  VEC_safe_push (tree, heap, build_vdefs, var);
  bitmap_set_bit (build_stores, DECL_UID (sym));
}
/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  tree sym;

  if (TREE_CODE (var) != SSA_NAME)
    {
      tree mpt;
      var_ann_t ann;

      /* If VAR belongs to a memory partition, use it instead of VAR.  */
      mpt = memory_partition (var);
      if (mpt)
	var = mpt;

      /* Don't allow duplicate entries.  */
      ann = get_var_ann (var);
      if (ann->in_vuse_list || ann->in_vdef_list)
	return;

      ann->in_vuse_list = true;
      sym = var;
    }
  else
    sym = SSA_NAME_VAR (var);

  VEC_safe_push (tree, heap, build_vuses, var);
  bitmap_set_bit (build_loads, DECL_UID (sym));
}
/* REF is a tree that contains the entire pointer dereference
   expression, if available, or NULL otherwise.  ALIAS is the variable
   we are asking if REF can access.  OFFSET and SIZE come from the
   memory access expression that generated this virtual operand.  */

static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
			   HOST_WIDE_INT size)
{
  bool offsetgtz = offset > 0;
  unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
  tree base = ref ? get_base_address (ref) : NULL;

  /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
     using a call-clobbered memory tag.  By definition, call-clobbered
     memory tags can always touch .GLOBAL_VAR.  */
  if (alias == gimple_global_var (cfun))
    return true;
  /* If ALIAS is an SFT, it can't be touched if the offset
     and size of the access is not overlapping with the SFT offset and
     size.  This is only true if we are accessing through a pointer
     to a type that is the same as SFT_PARENT_VAR.  Otherwise, we may
     be accessing through a pointer to some substruct of the
     structure, and if we try to prune there, we will have the wrong
     offset, and get the wrong answer.
     i.e., we can't prune without more work if we have something like

     struct gcc_target
     {
       struct asm_out
       {
	 const char *byte_op;
	 struct asm_int_op
	 {
	   const char *hi;
	 } aligned_op;
       } asm_out;
     } targetm;

     foo = &targetm.asm_out.aligned_op;
     return foo->hi;

     SFT.1, which represents hi, will have SFT_OFFSET=32 because in
     terms of SFT_PARENT_VAR, that is where it is.
     However, the access through the foo pointer will be at offset 0.  */
  if (size != -1
      && TREE_CODE (alias) == STRUCT_FIELD_TAG
      && base
      && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
      && !overlap_subvar (offset, size, alias, NULL))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }
  /* Without strict aliasing, it is impossible for a component access
     through a pointer to touch a random variable, unless that
     variable *is* a structure or a pointer.

     That is, given p->c, and some random global variable b,
     there is no legal way that p->c could be an access to b.

     Without strict aliasing on, we consider it legal to do something
     like:

     struct foos { int l; };
     int foo;
     static struct foos *getfoo(void);
     int main (void)
     {
       struct foos *f = getfoo();
       f->l = 1;
       foo = 2;
       if (f->l == 1)
	 abort();
       exit(0);
     }
     static struct foos *getfoo(void)
     { return (struct foos *)&foo; }

     (taken from 20000623-1.c)

     The docs also say/imply that access through union pointers
     is legal (but *not* if you take the address of the union member,
     i.e. the inverse), such that you can do

     typedef union {
       int d;
     } U;

     int rv;
     void breakme()
     {
       U *rv2;
       U *pretmp = (U*)&rv;
       rv2 = (U *) &rv;
       rv2->d = 42;
     }

     To implement this, we just punt on accesses through union
     pointers entirely.  */
  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && base
	   && (TREE_CODE (base) != INDIRECT_REF
	       || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
	   && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
	   && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
	   && !var_ann (alias)->is_heapvar
	   /* When the struct has may_alias attached to it, we need not to
	      prune the access: such types get alias set zero.  */
	   && get_alias_set (base))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }
  /* If the offset of the access is greater than the size of one of
     the possible aliases, it can't be touching that alias, because it
     would be past the end of the structure.  */
  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && !POINTER_TYPE_P (TREE_TYPE (alias))
	   && offsetgtz
	   && DECL_SIZE (alias)
	   && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
	   && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  return true;
}
/* Add VAR to the virtual operands array.  FLAGS is as in
   get_expr_operands.  FULL_REF is a tree that contains the entire
   pointer dereference expression, if available, or NULL otherwise.
   OFFSET and SIZE come from the memory access expression that
   generated this virtual operand.  IS_CALL_SITE is true if the
   affected statement is a call site.  */

static void
add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
		     tree full_ref, HOST_WIDE_INT offset,
		     HOST_WIDE_INT size, bool is_call_site)
{
  bitmap aliases = NULL;
  tree sym;
  var_ann_t v_ann;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark the statement as having memory operands.  */
  s_ann->references_memory = true;

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a VDEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     check that this only happens on non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     GIMPLE_MODIFY_STMT, then we can't suppress the VDEF, lest we run
     into validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_implicit) && unmodifiable_var_p (var))
    flags &= ~opf_def;

  /* The variable is not a GIMPLE register.  Add it (or its aliases) to
     virtual operands, unless the caller has specifically requested
     not to add virtual operands (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  if (MTAG_P (var))
    aliases = MTAG_ALIASES (var);

  if (aliases == NULL)
    {
      if (s_ann && !gimple_aliases_computed_p (cfun))
	s_ann->has_volatile_ops = true;

      /* The variable is not aliased or it is an alias tag.  */
      if (flags & opf_def)
	append_vdef (var);
      else
	append_vuse (var);
    }
  else
    {
      bitmap_iterator bi;
      unsigned int i;
      tree al;

      /* The variable is aliased.  Add its aliases to the virtual
	 operands.  */
      gcc_assert (!bitmap_empty_p (aliases));

      if (flags & opf_def)
	{
	  bool none_added = true;
	  EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
	    {
	      al = referenced_var (i);
	      if (!access_can_touch_variable (full_ref, al, offset, size))
		continue;

	      /* Call-clobbered tags may have non-call-clobbered
		 symbols in their alias sets.  Ignore them if we are
		 adding VOPs for a call site.  */
	      if (is_call_site && !is_call_clobbered (al))
		continue;

	      none_added = false;
	      append_vdef (al);
	    }

	  /* If the variable is also an alias tag, add a virtual
	     operand for it, otherwise we will miss representing
	     references to the members of the variable's alias set.
	     This fixes the bug in gcc.c-torture/execute/20020503-1.c.

	     It is also necessary to add bare defs on clobbers for
	     SMT's, so that bare SMT uses caused by pruning all the
	     aliases will link up properly with calls.   In order to
	     keep the number of these bare defs we add down to the
	     minimum necessary, we keep track of which SMT's were used
	     alone in statement vdefs or VUSEs.  */
	  if (none_added
	      || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
		  && is_call_site))
	    append_vdef (var);
	}
      else
	{
	  bool none_added = true;
	  EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
	    {
	      al = referenced_var (i);
	      if (!access_can_touch_variable (full_ref, al, offset, size))
		continue;

	      /* Call-clobbered tags may have non-call-clobbered
		 symbols in their alias sets.  Ignore them if we are
		 adding VOPs for a call site.  */
	      if (is_call_site && !is_call_clobbered (al))
		continue;

	      none_added = false;
	      append_vuse (al);
	    }

	  /* Even if no aliases have been added, we still need to
	     establish def-use and use-def chains, lest
	     transformations think that this is not a memory
	     reference.  For an example of this scenario, see
	     testsuite/g++.dg/opt/cleanup1.C.  */
	  if (none_added)
	    append_vuse (var);
	}
    }
}
/* Add *VAR_P to the appropriate operand array for S_ANN.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  tree var, sym;
  var_ann_t v_ann;

  gcc_assert (SSA_VAR_P (*var_p) && s_ann);

  var = *var_p;
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  */
  if (TREE_THIS_VOLATILE (sym))
    s_ann->has_volatile_ops = true;

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
}
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
   that got us here.

   FLAGS is as in get_expr_operands.

   FULL_REF contains the full pointer dereference expression, if we
   have it, or NULL otherwise.

   OFFSET and SIZE are the location of the access inside the
   dereferenced pointer, if known.

   RECURSE_ON_BASE should be set to true if we want to continue
   calling get_expr_operands on the base pointer, and false if
   something else will do it for us.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags, tree full_ref,
			   HOST_WIDE_INT offset, HOST_WIDE_INT size,
			   bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  s_ann->references_memory = true;
  if (s_ann && TREE_THIS_VOLATILE (expr))
    s_ann->has_volatile_ops = true;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_virtual_operand (pi->name_mem_tag, s_ann, flags,
			       full_ref, offset, size, false);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its symbol memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		  "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);

	  if (v_ann->symbol_mem_tag)
	    add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
				 full_ref, offset, size, false);
	  /* Aliasing information is missing; mark statement as
	     volatile so we won't optimize it out too actively.  */
	  else if (s_ann
		   && !gimple_aliases_computed_p (cfun)
		   && (flags & opf_def))
	    s_ann->has_volatile_ops = true;
	}
    }
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      /* If a constant is used as a pointer, we can't generate a real
	 operand for it but we mark the statement volatile to prevent
	 optimizations from messing things up.  */
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }
  else
    {
      /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
      gcc_unreachable ();
    }

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr, opf_use);
}
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag, ref;
  HOST_WIDE_INT offset, size, maxsize;
  subvar_t svars, sv;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* This statement references memory.  */
  s_ann->references_memory = 1;

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);

  if (TMR_SYMBOL (expr))
    add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken);

  tag = TMR_TAG (expr);
  if (!tag)
    {
      /* Something weird, so ensure that we will be careful.  */
      s_ann->has_volatile_ops = true;
      return;
    }

  if (DECL_P (tag))
    {
      get_expr_operands (stmt, &tag, flags);
      return;
    }

  ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
  gcc_assert (ref != NULL_TREE);
  svars = get_subvars_for_var (ref);
  for (sv = svars; sv; sv = sv->next)
    {
      bool exact;
      if (overlap_subvar (offset, maxsize, sv->var, &exact))
	add_stmt_operand (&sv->var, s_ann, flags);
    }
}
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b, not_written_b;

  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
      return;
    }

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */
  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;

  /* Add a VDEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var_lookup (u);
      unsigned int escape_mask = var_ann (var)->escape_mask;
      tree real_var = var;
      bool not_read;
      bool not_written;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b
		 ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
		 : false;

      not_written = not_written_b
		    ? bitmap_bit_p (not_written_b, DECL_UID (real_var))
		    : false;
      gcc_assert (!unmodifiable_var_p (var));

      clobber_stats.clobbered_vars++;

      /* See if this variable is really clobbered by this function.  */

      /* Trivial case: Things escaping only to pure/const are not
	 clobbered by non-pure-const, and only read by pure/const.  */
      if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
	{
	  tree call = get_call_expr_in (stmt);
	  if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
	    {
	      add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	  else
	    {
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	}

      if (not_written)
	{
	  clobber_stats.static_write_clobbers_avoided++;
	  if (!not_read)
	    add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
	  else
	    clobber_stats.static_read_clobbers_avoided++;
	}
      else
	add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
    }
}
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
      return;
    }

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var (u);
      tree real_var = var;
      bool not_read;

      clobber_stats.readonly_clobbers++;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
			    : false;

      if (not_read)
	{
	  clobber_stats.static_readonly_clobbers_avoided++;
	  continue;
	}

      add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
    }
}
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  int i, nargs;
  int call_flags = call_expr_flags (expr);
  stmt_ann_t ann = stmt_ann (stmt);

  ann->references_memory = true;

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (gimple_aliases_computed_p (cfun)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt, get_callee_fndecl (expr));
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &CALL_EXPR_FN (expr), opf_use);
  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    get_expr_operands (stmt, &CALL_EXPR_ARG (expr, i), opf_use);

  get_expr_operands (stmt, &CALL_EXPR_STATIC_CHAIN (expr), opf_use);
}
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann;
  int i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  tree link;

  s_ann = stmt_ann (stmt);
  noutputs = list_length (ASM_OUTPUTS (stmt));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
    }

  /* Gather all input operands.  */
  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	s_ann->references_memory = true;

	EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
	  {
	    tree var = referenced_var (i);
	    add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
	  }

	EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
	  {
	    tree var = referenced_var (i);

	    /* Subvars are explicitly represented in this list, so we
	       don't need the original to be added to the clobber ops,
	       but the original *will* be in this list because we keep
	       the addressability of the original variable up-to-date
	       to avoid confusing the back-end.  */
	    if (var_can_have_subvars (var)
		&& get_subvars_for_var (var) != NULL)
	      continue;

	    add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
	  }
      }
}
/* Scan operands for the assignment expression EXPR in statement STMT.  */

static void
get_modify_stmt_operands (tree stmt, tree expr)
{
  /* First get operands from the RHS.  */
  get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_use);

  /* For the LHS, use a regular definition (opf_def) for GIMPLE
     registers.  If the LHS is a store to memory, we will need
     a preserving definition (VDEF).

     Preserving definitions are those that modify a part of an
     aggregate object for which no subvars have been computed (or the
     reference does not correspond exactly to one of them).  Stores
     through a pointer are also represented with VDEF operators.

     We used to distinguish between preserving and killing definitions.
     We always emit preserving definitions now.  */
  get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0), opf_def);
}
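
/* Illustration of the DEF/VDEF split above: for 'x = y' where both are
   GIMPLE registers, the LHS walk produces a real DEF of 'x'; for
   '*p = y', the LHS reaches the INDIRECT_REF handler and the store is
   recorded as a preserving VDEF on p's memory tag instead.  */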

/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);

      /* If the address is invariant, there may be no interesting
	 variable references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case STRUCT_FIELD_TAG:
    case SYMBOL_MEMORY_TAG:
    case NAME_MEMORY_TAG:
      add_stmt_operand (expr_p, s_ann, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      {
	subvar_t svars;

	/* Add the subvars for a variable, if it has subvars, to DEFS
	   or USES.  Otherwise, add the variable itself.  Whether it
	   goes to USES or DEFS depends on the operand flags.  */
	if (var_can_have_subvars (expr)
	    && (svars = get_subvars_for_var (expr)))
	  {
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      add_stmt_operand (&sv->var, s_ann, flags);
	  }
	else
	  add_stmt_operand (expr_p, s_ann, flags);

	return;
      }

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* Fall through.  */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree ref;
	HOST_WIDE_INT offset, size, maxsize;
	bool none = true;

	/* This component reference becomes an access to all of the
	   subvariables it can touch, if we can determine that, but
	   *NOT* the real one.  If we can't determine which fields we
	   could touch, the recursion will eventually get to a
	   variable and add *all* of its subvars, or whatever is the
	   minimum correct subset.  */
	ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
	if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
	  {
	    subvar_t sv;
	    subvar_t svars = get_subvars_for_var (ref);

	    for (sv = svars; sv; sv = sv->next)
	      {
		bool exact;

		if (overlap_subvar (offset, maxsize, sv->var, &exact))
		  {
		    int subvar_flags = flags;
		    none = false;
		    add_stmt_operand (&sv->var, s_ann, subvar_flags);
		  }
	      }

	    if (!none)
	      flags |= opf_no_vops;
	  }
	else if (TREE_CODE (ref) == INDIRECT_REF)
	  {
	    get_indirect_ref_operands (stmt, ref, flags, expr, offset,
				       maxsize, false);
	    flags |= opf_no_vops;
	  }

	/* Even if we found subvars above, we still need to make sure we
	   see immediate uses such as 'd' in s.a[d]; if s.a has a subvar
	   we would miss it otherwise.  */
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      s_ann->has_volatile_ops = true;
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
      return;

    case GIMPLE_MODIFY_STMT:
      get_modify_stmt_operands (stmt, expr);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	for (idx = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, opf_use);

	return;
      }

    case BIT_FIELD_REF:
    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
    case CONST_DECL:
    case OMP_PARALLEL:
    case OMP_SECTIONS:
    case OMP_FOR:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_RETURN:
    case OMP_CONTINUE:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
	goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
	goto do_binary;
      if (class == tcc_constant || class == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
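
/* Illustrative sketch (not part of the original file): once the scan
   above and finalize_ssa_operands have populated a statement's cache,
   a pass would typically walk the collected operands with the
   iterators from tree-ssa-operands.h.  A hypothetical example,
   assuming STMT already has up-to-date operands:  */
#if 0
static void
example_walk_operands (tree stmt)
{
  tree op;
  ssa_op_iter iter;

  /* Visit every real USE operand found by the scan; these point
     directly into the statement tree.  */
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
    print_generic_expr (stderr, op, TDF_SLIM);

  /* Virtual operands (VUSEs here) live only in the cache.  */
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VUSE)
    print_generic_expr (stderr, op, TDF_SLIM);
}
#endif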

/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case GIMPLE_MODIFY_STMT:
      get_modify_stmt_operands (stmt, stmt);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_use);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_use);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_use);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_use);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_use);
      break;

    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      /* These nodes contain no variable references.  */
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the
	 operand pointer (which may only happen for USE operands), we
	 will fail in add_stmt_operand.  This default will handle
	 statements like empty statements, or CALL_EXPRs that may
	 appear on the RHS of a statement or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_use);
      break;
    }
}
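
/* Worked example (illustrative, not in the original file): for a
   statement 'a = b + c', the dispatch above reaches
   get_modify_stmt_operands, and after the scan the build vectors
   would hold roughly:

       build_defs:  { a }          (real operand, points into the stmt)
       build_uses:  { b, c }       (real operands)
       build_vdefs: { }            (no memory is touched)
       build_vuses: { }  */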

/* Create an operands cache for STMT.  */

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands and
     makes no memory references.  */
  ann->has_volatile_ops = false;
  ann->references_memory = false;

  /* Just clear the bitmap so we don't end up reallocating it over and
     over.  */
  if (ann->addresses_taken)
    bitmap_clear (ann->addresses_taken);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  operand_build_sort_virtual (build_vuses);
  operand_build_sort_virtual (build_vdefs);
  finalize_ssa_stmt_operands (stmt);

  if (ann->addresses_taken && bitmap_empty_p (ann->addresses_taken))
    ann->addresses_taken = NULL;

  /* For added safety, assume that statements with volatile operands
     also reference memory.  */
  if (ann->has_volatile_ops)
    ann->references_memory = true;
}

/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->vdef_ops = NULL;
  ops->vuse_ops = NULL;
  BITMAP_FREE (ops->loads);
  BITMAP_FREE (ops->stores);
}

/* Get the operands of statement STMT.  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  timevar_push (TV_TREE_OPS);

  gcc_assert (ann->modified);
  build_ssa_operands (stmt);
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
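
/* Illustrative usage sketch (an assumption, not part of the original
   file): passes normally do not call update_stmt_operands directly;
   they edit the statement and then call update_stmt, which marks the
   statement modified and triggers the re-scan above:  */
#if 0
  /* Replace the RHS of a GIMPLE_MODIFY_STMT with NEW_RHS (assumed to
     be a valid GIMPLE rvalue), then refresh the operand cache.  */
  GIMPLE_STMT_OPERAND (stmt, 1) = new_rhs;
  update_stmt (stmt);
#endif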

/* Copies virtual operands from SRC to DEST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  unsigned int i, n;
  voptype_p src_vuses, dest_vuses;
  voptype_p src_vdefs, dest_vdefs;
  struct voptype_d vuse;
  struct voptype_d vdef;
  stmt_ann_t dest_ann;

  VDEF_OPS (dest) = NULL;
  VUSE_OPS (dest) = NULL;

  dest_ann = get_stmt_ann (dest);
  BITMAP_FREE (dest_ann->operands.loads);
  BITMAP_FREE (dest_ann->operands.stores);

  if (LOADED_SYMS (src))
    {
      dest_ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);
      bitmap_copy (dest_ann->operands.loads, LOADED_SYMS (src));
    }

  if (STORED_SYMS (src))
    {
      dest_ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);
      bitmap_copy (dest_ann->operands.stores, STORED_SYMS (src));
    }

  /* Copy all the VUSE operators and corresponding operands.  */
  dest_vuses = &vuse;
  for (src_vuses = VUSE_OPS (src); src_vuses; src_vuses = src_vuses->next)
    {
      n = VUSE_NUM (src_vuses);
      dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses);
      for (i = 0; i < n; i++)
	SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));

      if (VUSE_OPS (dest) == NULL)
	VUSE_OPS (dest) = vuse.next;
    }

  /* Copy all the VDEF operators and corresponding operands.  */
  dest_vdefs = &vdef;
  for (src_vdefs = VDEF_OPS (src); src_vdefs; src_vdefs = src_vdefs->next)
    {
      n = VUSE_NUM (src_vdefs);
      dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs);
      VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs);
      for (i = 0; i < n; i++)
	SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));

      if (VDEF_OPS (dest) == NULL)
	VDEF_OPS (dest) = vdef.next;
    }
}
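
/* Illustrative sketch (assumption, not in the original file): a pass
   that duplicates a statement which references memory would give the
   copy the same virtual operands as the original, e.g.:  */
#if 0
  tree copy = unshare_expr (stmt);
  /* ... insert COPY into the IL next to STMT ...  */
  copy_virtual_operands (copy, stmt);
#endif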

/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this
   can be used to eliminate redundant loads.  OLD_STMT is the store stmt,
   and NEW_STMT is the new load which represents a load of the values
   stored.  */

void
create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt)
{
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned i;

  get_stmt_ann (new_stmt);

  /* Process NEW_STMT looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  /* Clear the duplicate-avoidance flags on the virtual operands just
     collected, so they can be discarded below.  */
  for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
    if (TREE_CODE (op) != SSA_NAME)
      var_ann (op)->in_vuse_list = false;

  for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
    if (TREE_CODE (op) != SSA_NAME)
      var_ann (op)->in_vdef_list = false;

  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_vdefs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the VDEF result operand on the new statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
    append_vuse (op);

  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}
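
/* Hypothetical usage sketch: DOM builds the fake load from a store
   'lhs = rhs' by swapping the two operands, so that a later real load
   of LHS can be matched against it.  Assuming a helper such as
   build_gimple_modify_stmt is available:  */
#if 0
  tree lhs = GIMPLE_STMT_OPERAND (store_stmt, 0);
  tree rhs = GIMPLE_STMT_OPERAND (store_stmt, 1);
  tree fake_load = build_gimple_modify_stmt (rhs, lhs);
  create_ssa_artificial_load_stmt (fake_load, store_stmt);
#endif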

/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to check the validity of the swap operation.  */

void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* If both uses don't have operand entries, there isn't much we can
	 do at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
	{
	  tree *tmp = USE_OP_PTR (use1)->use;
	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
	  USE_OP_PTR (use0)->use = tmp;
	}
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
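
/* Illustrative sketch (not in the original file): a pass canonicalizing
   a commutative expression goes through this routine, rather than
   swapping the trees directly, so that the operand cache and the
   immediate-use lists stay consistent:  */
#if 0
  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
  if (commutative_tree_code (TREE_CODE (rhs))
      && tree_swap_operands_p (TREE_OPERAND (rhs, 0),
			       TREE_OPERAND (rhs, 1), true))
    swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0), &TREE_OPERAND (rhs, 1));
#endif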

/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
   a single variable whose address has been taken or any other valid
   GIMPLE memory reference (structure reference, array, etc).  If the
   base address of REF is a decl that has sub-variables, also add all
   of its sub-variables.  */

void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
  tree var;
  subvar_t svars;

  gcc_assert (addresses_taken);

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*addresses_taken == NULL)
	*addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
	  && (svars = get_subvars_for_var (var)))
	{
	  subvar_t sv;
	  for (sv = svars; sv; sv = sv->next)
	    {
	      bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
	      TREE_ADDRESSABLE (sv->var) = 1;
	    }
	}
      else
	{
	  bitmap_set_bit (*addresses_taken, DECL_UID (var));
	  TREE_ADDRESSABLE (var) = 1;
	}
    }
}
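
/* Example (illustrative): for 'p = &s.f' the base address of the
   ADDR_EXPR operand is 's', so 's' (and all of its sub-variables, if
   any) is added to the set and marked TREE_ADDRESSABLE; marking only
   the field 'f' would be wrong, per the PR 21407 discussion above.  */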

/* Scan the immediate_use list for VAR, making sure it is linked
   properly.  Return TRUE if there is a problem and emit an error
   message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
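
/* Hypothetical checking sketch (not in the original file): a
   whole-function consistency check could run verify_imm_links over
   every SSA name:  */
#if 0
  unsigned int i;
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name && verify_imm_links (stderr, name))
	internal_error ("broken immediate-use list for an SSA name");
    }
#endif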

/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->stmt == NULL && use_p->use == NULL)
	fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS|TDF_MEMSYMS);
      else
	print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }
  fprintf (file, "\n");
}

/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
	continue;
      dump_immediate_uses_for (file, var);
    }
}

/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

/* Create a new change buffer for the statement pointed to by STMT_P and
   push the buffer into SCB_STACK.  Each change buffer
   records state information needed to determine what changed in the
   statement.  Mainly, this keeps track of symbols that may need to be
   put into SSA form, SSA name replacements and other information
   needed to keep the SSA form up to date.  */

void
push_stmt_changes (tree *stmt_p)
{
  tree stmt;
  scb_t buf;

  stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  buf = xmalloc (sizeof *buf);
  memset (buf, 0, sizeof *buf);

  buf->stmt_p = stmt_p;

  if (stmt_references_memory_p (stmt))
    {
      tree op;
      ssa_op_iter i;

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
	{
	  tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
	  if (buf->loads == NULL)
	    buf->loads = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (buf->loads, DECL_UID (sym));
	}

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
	{
	  tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
	  if (buf->stores == NULL)
	    buf->stores = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (buf->stores, DECL_UID (sym));
	}
    }

  VEC_safe_push (scb_t, heap, scb_stack, buf);
}
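
/* Illustrative usage sketch (an assumption, not in the original file):
   the intended pattern brackets a potential modification with
   push/pop, or discards the buffer when nothing changed:  */
#if 0
  push_stmt_changes (&stmt);
  if (fold_stmt (&stmt))
    pop_stmt_changes (&stmt);		/* Re-scan, mark for renaming.  */
  else
    discard_stmt_changes (&stmt);	/* No change; skip the re-scan.  */
#endif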

/* Given two sets S1 and S2, mark the symbols that differ in S1 and S2
   for renaming.  The set to mark for renaming is (S1 & ~S2) | (S2 & ~S1).  */

static void
mark_difference_for_renaming (bitmap s1, bitmap s2)
{
  if (s1 == NULL && s2 == NULL)
    return;

  if (s1 && s2 == NULL)
    mark_set_for_renaming (s1);
  else if (s1 == NULL && s2)
    mark_set_for_renaming (s2);
  else if (!bitmap_equal_p (s1, s2))
    {
      bitmap t1 = BITMAP_ALLOC (NULL);
      bitmap t2 = BITMAP_ALLOC (NULL);

      bitmap_and_compl (t1, s1, s2);
      bitmap_and_compl (t2, s2, s1);
      bitmap_ior_into (t1, t2);
      mark_set_for_renaming (t1);

      BITMAP_FREE (t1);
      BITMAP_FREE (t2);
    }
}
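
/* Worked example (illustrative): if S1 = {a, b} and S2 = {b, c}, then
   (S1 & ~S2) | (S2 & ~S1) = {a, c}, so 'a' and 'c' are marked for
   renaming while 'b', common to both sets, is left alone.  */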

/* Pop the top SCB from SCB_STACK and act on the differences between
   what was recorded by push_stmt_changes and the current state of
   the statement.  */

void
pop_stmt_changes (tree *stmt_p)
{
  tree op, stmt;
  ssa_op_iter iter;
  bitmap loads, stores;
  scb_t buf;

  stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  buf = VEC_pop (scb_t, scb_stack);
  gcc_assert (stmt_p == buf->stmt_p);

  /* Force an operand re-scan on the statement and mark any newly
     exposed variables.  */
  update_stmt (stmt);

  /* Determine whether any memory symbols need to be renamed.  If the
     sets of loads and stores are different after the statement is
     modified, then the affected symbols need to be renamed.

     Note that it may be possible for the statement to not reference
     memory anymore, but we still need to act on the differences in
     the sets of symbols.  */
  loads = stores = NULL;
  if (stmt_references_memory_p (stmt))
    {
      tree op;
      ssa_op_iter i;

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
	{
	  tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
	  if (loads == NULL)
	    loads = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (loads, DECL_UID (sym));
	}

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
	{
	  tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
	  if (stores == NULL)
	    stores = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (stores, DECL_UID (sym));
	}
    }

  /* If LOADS is different from BUF->LOADS, the affected
     symbols need to be marked for renaming.  */
  mark_difference_for_renaming (loads, buf->loads);

  /* Similarly for STORES and BUF->STORES.  */
  mark_difference_for_renaming (stores, buf->stores);

  /* Mark all the naked GIMPLE register operands for renaming.  */
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
    if (DECL_P (op))
      mark_sym_for_renaming (op);

  /* FIXME, need to add more finalizers here.  Cleanup EH info,
     recompute invariants for address expressions, add
     SSA replacement mappings, etc.  For instance, given
     testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of
     the form

	  # SMT.4_20 = VDEF <SMT.4_16>
	  D.1576_11 = 1.0e+0;

     So, the VDEF will disappear, but instead of marking SMT.4 for
     renaming it would be far more efficient to establish a
     replacement mapping that would replace every reference of
     SMT.4_20 with SMT.4_16.  */

  /* Free memory used by the buffer.  */
  BITMAP_FREE (buf->loads);
  BITMAP_FREE (buf->stores);
  BITMAP_FREE (loads);
  BITMAP_FREE (stores);
  buf->stmt_p = NULL;
  free (buf);
}

/* Discard the topmost change buffer from SCB_STACK.  This is useful
   when the caller realizes that it did not actually modify the
   statement.  It avoids the expensive operand re-scan.  */

void
discard_stmt_changes (tree *stmt_p)
{
  scb_t buf;
  tree stmt;

  /* It makes no sense to keep track of PHI nodes.  */
  stmt = *stmt_p;
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  buf = VEC_pop (scb_t, scb_stack);
  gcc_assert (stmt_p == buf->stmt_p);

  /* Free memory used by the buffer.  */
  BITMAP_FREE (buf->loads);
  BITMAP_FREE (buf->stores);
  buf->stmt_p = NULL;
  free (buf);
}

/* Returns true if statement STMT may access memory.  */

bool
stmt_references_memory_p (tree stmt)
{
  if (!gimple_ssa_operands (cfun)->ops_active || TREE_CODE (stmt) == PHI_NODE)
    return false;

  return stmt_ann (stmt)->references_memory;
}