/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
23 #include "coretypes.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: defs, uses, virtual uses, and virtual may-defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
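
/* Illustrative sketch, not part of the original file: the update cycle
   described above corresponds roughly to

	start_ssa_stmt_operands ();		(build_* vectors start empty)
	parse_ssa_operands (stmt);		(get_* routines call append_*)
	finalize_ssa_stmt_operands (stmt);	(match against the old cache)

   using the routines defined later in this file.  */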
/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use		0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def		(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of GIMPLE_MODIFY_STMT from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit	(1 << 2)
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Set for building all the VDEF operands.  */
static VEC(tree,heap) *build_vdefs;

/* Set for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;

/* Bitmap obstack for our datastructures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

/* Set for building all the loaded symbols.  */
static bitmap build_loads;

/* Set for building all the stored symbols.  */
static bitmap build_stores;
static void get_expr_operands (tree, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;
/* Statement change buffer.  Data structure used to record state
   information for statements.  This is used to determine what needs
   to be done in order to update the SSA web after a statement is
   modified by a pass.  If STMT is a statement that has just been
   created, or needs to be folded via fold_stmt, or anything that
   changes its physical structure, then the pass should:

   1- Call push_stmt_changes (&stmt) to record the current state of
      STMT before any modifications are made.

   2- Make all appropriate modifications to the statement.

   3- Call pop_stmt_changes (&stmt) to find new symbols that
      need to be put in SSA form, SSA name mappings for names that
      have disappeared, recompute invariantness for address
      expressions, cleanup EH information, etc.

   If it is possible to determine that the statement was not modified,
   instead of calling pop_stmt_changes it is quicker to call
   discard_stmt_changes to avoid the expensive and unnecessary operand
   re-scan and change comparison.  */
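
/* Illustrative usage sketch, not part of the original file: a pass
   following the protocol above would look like

	push_stmt_changes (&stmt);
	... fold_stmt or other physical modifications to STMT ...
	if (stmt_was_modified)
	  pop_stmt_changes (&stmt);
	else
	  discard_stmt_changes (&stmt);

   where stmt_was_modified stands for whatever test the pass uses to
   detect that a change actually happened.  */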
struct scb_d
{
  /* Pointer to the statement being modified.  */
  tree *stmt_p;

  /* If the statement references memory these are the sets of symbols
     loaded and stored by the statement.  */
  bitmap loads;
  bitmap stores;
};

typedef struct scb_d *scb_t;

DEF_VEC_P(scb_t);
DEF_VEC_ALLOC_P(scb_t,heap);

/* Stack of statement change buffers (SCB).  Every call to
   push_stmt_changes pushes a new buffer onto the stack.  Calls to
   pop_stmt_changes pop a buffer off of the stack and compute the set
   of changes for the popped statement.  */
static VEC(scb_t,heap) *scb_stack;
/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}
/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *)p);
  tree e2 = *((const tree *)q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}
/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static inline void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);

  if (num < 2)
    return;

  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
	  > get_name_decl (VEC_index (tree, list, 1)))
	{
	  /* Swap elements if in the wrong order.  */
	  tree tmp = VEC_index (tree, list, 0);
	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
	  VEC_replace (tree, list, 1, tmp);
	}
      return;
    }

  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
	 VEC_length (tree, list),
	 sizeof (tree),
	 operand_build_cmp);
}
/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}
/* VOPs are of variable size, so the free list maps "free buckets" to the
   number of operands each bucket's VOPs hold: buckets 0-15 hold VOPs of
   1-16 operands, and every bucket past 15 covers an 8-operand range.
   Any VOPs larger than this are simply added to the largest bucket when they
   are freed.  */
/* Return the number of operands used in bucket BUCKET.  */

static inline int
vop_free_bucket_size (int bucket)
{
#ifdef ENABLE_CHECKING
  gcc_assert (bucket >= 0 && bucket < NUM_VOP_FREE_BUCKETS);
#endif
  if (bucket < 16)
    return bucket + 1;
  return (bucket - 13) * 8;
}
/* For a vop of NUM operands, return the bucket NUM belongs to.  If NUM is
   beyond the end of the bucket table, return -1.  */

static inline int
vop_free_bucket_index (int num)
{
  gcc_assert (num > 0 && NUM_VOP_FREE_BUCKETS > 16);

  /* Sizes 1 through 16 use buckets 0-15.  */
  if (num <= 16)
    return num - 1;
  /* Buckets 16 - NUM_VOP_FREE_BUCKETS represent 8 unit chunks.  */
  num = 14 + (num - 1) / 8;
  if (num >= NUM_VOP_FREE_BUCKETS)
    return -1;
  else
    return num;
}
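
/* Worked example, not part of the original file: a VOP with 16 operands
   maps to bucket 15 (sizes 1-16 use buckets 0-15), while a VOP with 17
   operands maps to bucket 14 + (17 - 1) / 8 = 16, whose capacity per
   vop_free_bucket_size is (16 - 13) * 8 = 24 operands.  */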
/* Initialize the VOP free buckets.  */

static inline void
init_vop_buckets (void)
{
  int x;

  for (x = 0; x < NUM_VOP_FREE_BUCKETS; x++)
    gimple_ssa_operands (cfun)->vop_free_buckets[x] = NULL;
}
/* Add PTR to the appropriate VOP bucket.  */

static inline void
add_vop_to_freelist (voptype_p ptr)
{
  int bucket = vop_free_bucket_index (VUSE_VECT_NUM_ELEM (ptr->usev));

  /* Too large, use the largest bucket so it's not a complete throwaway.  */
  if (bucket == -1)
    bucket = NUM_VOP_FREE_BUCKETS - 1;

  ptr->next = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
  gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = ptr;
}
/* These are the sizes of the operand memory buffer which gets allocated each
   time more operands space is required.  The final value is the amount that
   is allocated every time after that.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_1	30
#define OP_SIZE_2	110
#define OP_SIZE_3	511
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuses = VEC_alloc (tree, heap, 25);
      build_vdefs = VEC_alloc (tree, heap, 25);
      bitmap_obstack_initialize (&operands_bitmap_obstack);
      build_loads = BITMAP_ALLOC (&operands_bitmap_obstack);
      build_stores = BITMAP_ALLOC (&operands_bitmap_obstack);
      scb_stack = VEC_alloc (scb_t, heap, 20);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index
    = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  init_vop_buckets ();
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
}
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  unsigned ix;
  tree mpt;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      VEC_free (tree, heap, build_vdefs);
      VEC_free (tree, heap, build_vuses);
      BITMAP_FREE (build_loads);
      BITMAP_FREE (build_stores);

      /* The change buffer stack had better be empty.  */
      gcc_assert (VEC_length (scb_t, scb_stack) == 0);
      VEC_free (scb_t, heap, scb_stack);
      scb_stack = NULL;
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
	= gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  for (ix = 0;
       VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt);
       ix++)
    if (mpt)
      BITMAP_FREE (MPT_SYMBOLS (mpt));

  VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table);

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars: %d\n",
	       clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided: %d\n",
	       clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided: %d\n",
	       clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
	       clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers: %d\n",
	       clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
	       clobber_stats.static_readonly_clobbers_avoided);
    }
}
/* Return memory for operands of SIZE chunks.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      if (gimple_ssa_operands (cfun)->ssa_operand_mem_size == OP_SIZE_INIT)
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_1 * sizeof (struct voptype_d);
      else if (gimple_ssa_operands (cfun)->ssa_operand_mem_size
	       == OP_SIZE_1 * sizeof (struct voptype_d))
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_2 * sizeof (struct voptype_d);
      else
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_3 * sizeof (struct voptype_d);

      /* Go right to the maximum size if the request is too large.  */
      if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size)
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_3 * sizeof (struct voptype_d);

      /* Fail if there is not enough space.  If there are this many operands
	 required, first make sure there isn't a different problem causing this
	 many operands.  If the decision is that this is OK, then we can
	 specially allocate a buffer just for this request.  */
      gcc_assert (size <= gimple_ssa_operands (cfun)->ssa_operand_mem_size);

      ptr = (struct ssa_operand_memory_d *)
	ggc_alloc (sizeof (struct ssa_operand_memory_d)
		   + gimple_ssa_operands (cfun)->ssa_operand_mem_size - 1);
      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
	  ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}
/* Allocate a DEF operand.  */

static inline struct def_optype_d *
alloc_def (void)
{
  struct def_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_defs)
    {
      ret = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs
	= gimple_ssa_operands (cfun)->free_defs->next;
    }
  else
    ret = (struct def_optype_d *)
	  ssa_operand_alloc (sizeof (struct def_optype_d));
  return ret;
}
/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
	= gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
	  ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}
/* Allocate a vop with NUM elements.  */

static inline struct voptype_d *
alloc_vop (int num)
{
  struct voptype_d *ret = NULL;
  int alloc_size = 0;

  int bucket = vop_free_bucket_index (num);
  if (bucket != -1)
    {
      /* If there is a free operand, use it.  */
      if (gimple_ssa_operands (cfun)->vop_free_buckets[bucket] != NULL)
	{
	  ret = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
	  gimple_ssa_operands (cfun)->vop_free_buckets[bucket] =
	    gimple_ssa_operands (cfun)->vop_free_buckets[bucket]->next;
	}
      else
	alloc_size = vop_free_bucket_size (bucket);
    }
  else
    alloc_size = num;

  if (alloc_size > 0)
    ret = (struct voptype_d *)ssa_operand_alloc (
	sizeof (struct voptype_d) + (alloc_size - 1) * sizeof (vuse_element_t));

  VUSE_VECT_NUM_ELEM (ret->usev) = num;
  return ret;
}
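
/* Illustrative note, not part of the original file: a request for 3
   operands checks free bucket 2 first; only on a miss does alloc_vop
   carve a new VOP with the bucket's full capacity (3 slots here) out of
   the operand memory, so freeing it later refills that same bucket for
   reuse.  */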
/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* fold_stmt may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}
/* Adds OP to the list of defs after LAST.  */

static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
  def_optype_p new;

  new = alloc_def ();
  DEF_OP_PTR (new) = op;
  last->next = new;
  new->next = NULL;
  return new;
}
/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (tree stmt, tree *op, use_optype_p last)
{
  use_optype_p new;

  new = alloc_use ();
  USE_OP_PTR (new)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new), *op, stmt);
  last->next = new;
  new->next = NULL;
  return new;
}
/* Return a virtual op pointer with NUM elements which are all initialized
   to OP and are linked into the immediate uses for STMT.  The new vop is
   appended after PREV.  */

static inline voptype_p
add_vop (tree stmt, tree op, int num, voptype_p prev)
{
  voptype_p new;
  int x;

  new = alloc_vop (num);
  for (x = 0; x < num; x++)
    {
      VUSE_OP_PTR (new, x)->prev = NULL;
      SET_VUSE_OP (new, x, op);
      VUSE_OP_PTR (new, x)->use = &new->usev.uses[x].use_var;
      link_imm_use_stmt (VUSE_OP_PTR (new, x), new->usev.uses[x].use_var, stmt);
    }

  if (prev)
    prev->next = new;
  new->next = NULL;
  return new;
}
/* Adds OP to the list of vuses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline voptype_p
add_vuse_op (tree stmt, tree op, int num, voptype_p last)
{
  voptype_p new = add_vop (stmt, op, num, last);
  VDEF_RESULT (new) = NULL_TREE;
  return new;
}
/* Adds OP to the list of vdefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline voptype_p
add_vdef_op (tree stmt, tree op, int num, voptype_p last)
{
  voptype_p new = add_vop (stmt, op, num, last);
  VDEF_RESULT (new) = op;
  return new;
}
/* Reallocate the virtual operand PTR so that it has NUM_ELEM use slots.  ROOT
   is the head of the operand list it belongs to.  */

static inline struct voptype_d *
realloc_vop (struct voptype_d *ptr, unsigned int num_elem,
	     struct voptype_d **root)
{
  unsigned int x, lim;
  tree stmt, val;
  struct voptype_d *ret, *tmp;

  if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
    return ptr;

  val = VUSE_OP (ptr, 0);
  if (TREE_CODE (val) == SSA_NAME)
    val = SSA_NAME_VAR (val);

  stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));

  /* Delink all the existing uses.  */
  for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
    {
      use_operand_p use_p = VUSE_OP_PTR (ptr, x);
      delink_imm_use (use_p);
    }

  /* If we want less space, simply use this one, and shrink the size.  */
  if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem)
    {
      VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem;
      return ptr;
    }

  /* It is growing.  Allocate a new one and replace the old one.  */
  ret = add_vuse_op (stmt, val, num_elem, ptr);

  /* Clear PTR and add its memory to the free list.  */
  lim = VUSE_VECT_NUM_ELEM (ptr->usev);
  memset (ptr, 0,
	  sizeof (struct voptype_d) + sizeof (vuse_element_t) * (lim - 1));
  add_vop_to_freelist (ptr);

  /* Now simply remove the old one.  */
  if (*root == ptr)
    {
      *root = ret;
      return ret;
    }
  else
    for (tmp = *root;
	 tmp != NULL && tmp->next != ptr;
	 tmp = tmp->next)
      ;

  /* The pointer passed in isn't in STMT's VDEF lists.  */
  gcc_assert (tmp != NULL);
  tmp->next = ret;
  return ret;
}
/* Reallocate the PTR vdef so that it has NUM_ELEM use slots.  */

struct voptype_d *
realloc_vdef (struct voptype_d *ptr, unsigned int num_elem)
{
  tree val, stmt;
  struct voptype_d *ret;

  val = VDEF_RESULT (ptr);
  stmt = USE_STMT (VDEF_OP_PTR (ptr, 0));
  ret = realloc_vop (ptr, num_elem, &(VDEF_OPS (stmt)));
  VDEF_RESULT (ret) = val;
  return ret;
}
/* Reallocate the PTR vuse so that it has NUM_ELEM use slots.  */

struct voptype_d *
realloc_vuse (struct voptype_d *ptr, unsigned int num_elem)
{
  tree stmt;
  struct voptype_d *ret;

  stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
  ret = realloc_vop (ptr, num_elem, &(VUSE_OPS (stmt)));
  return ret;
}
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (tree stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);

  new_list.next = NULL;
  last = &new_list;

  old_ops = DEF_OPS (stmt);

  new_i = 0;

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  DEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    def_optype_p ptr;
    unsigned x = 0;

    for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == num);
  }
#endif
}
/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (tree stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to update_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
  }
#endif

  new_list.next = NULL;
  last = &new_list;

  old_ops = USE_OPS (stmt);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
		       (tree *) VEC_index (tree, build_uses, new_i),
		       last);

  /* Now set the stmt's operands.  */
  USE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;

    for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_uses));
  }
#endif
}
/* Takes elements from BUILD_VDEFS and turns them into vdef operands of
   STMT.  FIXME, for now VDEF operators should have a single operand
   in their RHS.  */

static inline void
finalize_ssa_vdefs (tree stmt)
{
  unsigned new_i;
  struct voptype_d new_list;
  voptype_p old_ops, ptr, last;
  stmt_ann_t ann = stmt_ann (stmt);

  /* Set the symbols referenced by STMT.  */
  if (!bitmap_empty_p (build_stores))
    {
      if (ann->operands.stores == NULL)
	ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);

      bitmap_copy (ann->operands.stores, build_stores);
    }
  else
    BITMAP_FREE (ann->operands.stores);

  /* If aliases have not been computed, do not instantiate a virtual
     operator on STMT.  Initially, we only compute the SSA form on
     GIMPLE registers.  The virtual SSA form is only computed after
     alias analysis, so virtual operators will remain unrenamed and
     the verifier will complain.  However, alias analysis needs to
     access symbol load/store information, so we need to compute
     those.  */
  if (!gimple_aliases_computed_p (cfun))
    return;

  new_list.next = NULL;
  last = &new_list;

  old_ops = VDEF_OPS (stmt);
  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_vdefs))
    {
      tree op = VEC_index (tree, build_vdefs, new_i);
      unsigned new_uid = get_name_decl (op);
      unsigned old_uid = get_name_decl (VDEF_RESULT (old_ops));

      /* FIXME, for now each VDEF operator should have at most one
	 operand in their RHS.  */
      gcc_assert (VDEF_NUM (old_ops) == 1);

      if (old_uid == new_uid)
	{
	  /* If the symbols are the same, reuse the existing operand.  */
	  last->next = old_ops;
	  last = old_ops;
	  old_ops = old_ops->next;
	  last->next = NULL;
	  set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt);
	  new_i++;
	}
      else if (old_uid < new_uid)
	{
	  /* If old is less than new, old goes to the free list.  */
	  voptype_p next;
	  delink_imm_use (VDEF_OP_PTR (old_ops, 0));
	  next = old_ops->next;
	  add_vop_to_freelist (old_ops);
	  old_ops = next;
	}
      else
	{
	  /* This is a new operand.  */
	  last = add_vdef_op (stmt, op, 1, last);
	  new_i++;
	}
    }

  /* If there is anything remaining in BUILD_VDEFS, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
    last = add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, last);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = last)
	{
	  last = ptr->next;
	  delink_imm_use (VDEF_OP_PTR (ptr, 0));
	  add_vop_to_freelist (ptr);
	}
    }

  /* Now set STMT's operands.  */
  VDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;

    for (ptr = VDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_vdefs));
  }
#endif
}
/* Takes elements from BUILD_VUSES and turns them into VUSE operands of
   STMT.  */

static inline void
finalize_ssa_vuse_ops (tree stmt)
{
  unsigned new_i, old_i;
  voptype_p old_ops, last;
  VEC(tree,heap) *new_ops;
  stmt_ann_t ann;

  /* Set the symbols referenced by STMT.  */
  ann = stmt_ann (stmt);
  if (!bitmap_empty_p (build_loads))
    {
      if (ann->operands.loads == NULL)
	ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);

      bitmap_copy (ann->operands.loads, build_loads);
    }
  else
    BITMAP_FREE (ann->operands.loads);

  /* If aliases have not been computed, do not instantiate a virtual
     operator on STMT.  Initially, we only compute the SSA form on
     GIMPLE registers.  The virtual SSA form is only computed after
     alias analysis, so virtual operators will remain unrenamed and
     the verifier will complain.  However, alias analysis needs to
     access symbol load/store information, so we need to compute
     those.  */
  if (!gimple_aliases_computed_p (cfun))
    return;

  /* STMT should have at most one VUSE operator.  */
  old_ops = VUSE_OPS (stmt);
  gcc_assert (old_ops == NULL || old_ops->next == NULL);

  new_ops = NULL;
  new_i = old_i = 0;
  while (old_ops
	 && old_i < VUSE_NUM (old_ops)
	 && new_i < VEC_length (tree, build_vuses))
    {
      tree new_op = VEC_index (tree, build_vuses, new_i);
      tree old_op = VUSE_OP (old_ops, old_i);
      unsigned new_uid = get_name_decl (new_op);
      unsigned old_uid = get_name_decl (old_op);

      if (old_uid == new_uid)
	{
	  /* If the symbols are the same, reuse the existing operand.  */
	  VEC_safe_push (tree, heap, new_ops, old_op);
	  new_i++;
	  old_i++;
	}
      else if (old_uid < new_uid)
	{
	  /* If OLD_UID is less than NEW_UID, the old operand has
	     disappeared, skip to the next old operand.  */
	  old_i++;
	}
      else
	{
	  /* This is a new operand.  */
	  VEC_safe_push (tree, heap, new_ops, new_op);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_vuses list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
    VEC_safe_push (tree, heap, new_ops, VEC_index (tree, build_vuses, new_i));

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
	delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
      add_vop_to_freelist (old_ops);
      VUSE_OPS (stmt) = NULL;
    }

  /* If there are any operands, instantiate a VUSE operator for STMT.  */
  if (new_ops)
    {
      tree op;
      unsigned i;

      last = add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), NULL);

      for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
	SET_USE (VUSE_OP_PTR (last, (int) i), op);

      VUSE_OPS (stmt) = last;
      VEC_free (tree, heap, new_ops);
    }

#ifdef ENABLE_CHECKING
  {
    unsigned x;

    if (VUSE_OPS (stmt))
      {
	gcc_assert (VUSE_OPS (stmt)->next == NULL);
	x = VUSE_NUM (VUSE_OPS (stmt));
      }
    else
      x = 0;

    gcc_assert (x == VEC_length (tree, build_vuses));
  }
#endif
}
/* Return a new VUSE operand vector for STMT.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_vdefs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     VDEF operator for a variable 'a', then a VUSE for 'a' is not
     needed because VDEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is pointed-to by p and q:

	      # VUSE <a_2>
	      # a_3 = VDEF <a_2>
	      *p = *q;

     The VUSE <a_2> is superfluous because it is implied by the
     VDEF operator.  */
  num = VEC_length (tree, build_vuses);
  num_vdefs = VEC_length (tree, build_vdefs);

  if (num > 0 && num_vdefs > 0)
    for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
      {
	tree vuse;
	vuse = VEC_index (tree, build_vuses, vuse_index);
	if (TREE_CODE (vuse) != SSA_NAME)
	  {
	    var_ann_t ann = var_ann (vuse);
	    ann->in_vuse_list = 0;
	    if (ann->in_vdef_list)
	      {
		VEC_ordered_remove (tree, build_vuses, vuse_index);
		continue;
	      }
	  }
	vuse_index++;
      }

  finalize_ssa_vuse_ops (stmt);
}
/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

static inline void
cleanup_build_arrays (void)
{
  unsigned i;
  tree t;

  for (i = 0; VEC_iterate (tree, build_vdefs, i, t); i++)
    if (TREE_CODE (t) != SSA_NAME)
      var_ann (t)->in_vdef_list = false;

  for (i = 0; VEC_iterate (tree, build_vuses, i, t); i++)
    if (TREE_CODE (t) != SSA_NAME)
      var_ann (t)->in_vuse_list = false;

  VEC_truncate (tree, build_vdefs, 0);
  VEC_truncate (tree, build_vuses, 0);
  VEC_truncate (tree, build_defs, 0);
  VEC_truncate (tree, build_uses, 0);
  bitmap_clear (build_loads);
  bitmap_clear (build_stores);
}
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_vdefs (stmt);
  finalize_ssa_vuses (stmt);
  cleanup_build_arrays ();
}
/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_vdefs) == 0);
  gcc_assert (bitmap_empty_p (build_loads));
  gcc_assert (bitmap_empty_p (build_stores));
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}
/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}
/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  tree sym;

  if (TREE_CODE (var) != SSA_NAME)
    {
      tree mpt;
      var_ann_t ann;

      /* If VAR belongs to a memory partition, use it instead of VAR.  */
      mpt = memory_partition (var);
      if (mpt)
	var = mpt;

      /* Don't allow duplicate entries.  */
      ann = get_var_ann (var);
      if (ann->in_vdef_list)
	return;

      ann->in_vdef_list = true;
      sym = var;
    }
  else
    sym = SSA_NAME_VAR (var);

  VEC_safe_push (tree, heap, build_vdefs, var);
  bitmap_set_bit (build_stores, DECL_UID (sym));
}
/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  tree sym;

  if (TREE_CODE (var) != SSA_NAME)
    {
      tree mpt;
      var_ann_t ann;

      /* If VAR belongs to a memory partition, use it instead of VAR.  */
      mpt = memory_partition (var);
      if (mpt)
	var = mpt;

      /* Don't allow duplicate entries.  */
      ann = get_var_ann (var);
      if (ann->in_vuse_list || ann->in_vdef_list)
	return;

      ann->in_vuse_list = true;
      sym = var;
    }
  else
    sym = SSA_NAME_VAR (var);

  VEC_safe_push (tree, heap, build_vuses, var);
  bitmap_set_bit (build_loads, DECL_UID (sym));
}
/* REF is a tree that contains the entire pointer dereference
   expression, if available, or NULL otherwise.  ALIAS is the variable
   we are asking if REF can access.  OFFSET and SIZE come from the
   memory access expression that generated this virtual operand.  */

static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
			   HOST_WIDE_INT size)
{
  bool offsetgtz = offset > 0;
  unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
  tree base = ref ? get_base_address (ref) : NULL;

  /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
     using a call-clobbered memory tag.  By definition, call-clobbered
     memory tags can always touch .GLOBAL_VAR.  */
  if (alias == gimple_global_var (cfun))
    return true;

  /* If ALIAS is an SFT, it can't be touched if the offset
     and size of the access is not overlapping with the SFT offset and
     size.  This is only true if we are accessing through a pointer
     to a type that is the same as SFT_PARENT_VAR.  Otherwise, we may
     be accessing through a pointer to some substruct of the
     structure, and if we try to prune there, we will have the wrong
     offset, and get the wrong answer.
     i.e., we can't prune without more work if we have something like

     struct gcc_target
     {
       struct asm_out
       {
	 const char *byte_op;
	 struct asm_int_op
	 {
	   const char *hi;
	 } aligned_op;
       } asm_out;
     } targetm;

     foo = &targetm.asm_out.aligned_op;
     return foo->hi;

     SFT.1, which represents hi, will have SFT_OFFSET=32 because in
     terms of SFT_PARENT_VAR, that is where it is.
     However, the access through the foo pointer will be at offset 0.  */
  if (size != -1
      && TREE_CODE (alias) == STRUCT_FIELD_TAG
      && base
      && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
      && !overlap_subvar (offset, size, alias, NULL))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }
  /* Without strict aliasing, it is impossible for a component access
     through a pointer to touch a random variable, unless that
     variable *is* a structure or a pointer.

     That is, given p->c, and some random global variable b,
     there is no legal way that p->c could be an access to b.

     Without strict aliasing on, we consider it legal to do something
     like:

     struct foos { int l; };
     int foo;
     static struct foos *getfoo(void);
     int main (void)
     {
       struct foos *f = getfoo();
       f->l = 1;
       foo = 2;
       if (f->l == 1)
	 abort();
       exit(0);
     }
     static struct foos *getfoo(void)
     { return (struct foos *)&foo; }

     (taken from 20000623-1.c)

     The docs also say/imply that access through union pointers
     is legal (but *not* if you take the address of the union member,
     i.e. the inverse), such that you can do

     typedef union {
       int d;
     } U;

     int rv;
     void breakme()
     {
       U *rv0;
       U *pretmp = (U*)&rv;
       rv0 = pretmp;
       rv0->d = 42;
     }

     To implement this, we just punt on accesses through union
     pointers entirely.  */
  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && base
	   && (TREE_CODE (base) != INDIRECT_REF
	       || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
	   && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
	   && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
	   && !var_ann (alias)->is_heapvar
	   /* When the struct has the may_alias attribute, its alias set
	      is zero and this pruning is not safe, so require a nonzero
	      alias set.  */
	   && get_alias_set (base))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }
  /* If the offset of the access is greater than the size of one of
     the possible aliases, it can't be touching that alias, because it
     would be past the end of the structure.  */
  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && !POINTER_TYPE_P (TREE_TYPE (alias))
	   && offsetgtz
	   && DECL_SIZE (alias)
	   && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
	   && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  return true;
}
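
/* Worked example, not part of the original file: given 'int b;' with a
   DECL_SIZE of 32 bits, a strict-aliasing access known to start at bit
   offset 64 cannot touch 'b' by the last test above, since uoffset (64)
   is greater than the 32-bit size of the variable.  */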
/* Add VAR to the virtual operands array.  FLAGS is as in
   get_expr_operands.  FULL_REF is a tree that contains the entire
   pointer dereference expression, if available, or NULL otherwise.
   OFFSET and SIZE come from the memory access expression that
   generated this virtual operand.  IS_CALL_SITE is true if the
   affected statement is a call site.  */

static void
add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
		     tree full_ref, HOST_WIDE_INT offset,
		     HOST_WIDE_INT size, bool is_call_site)
{
  bitmap aliases = NULL;
  tree sym;
  var_ann_t v_ann;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark the statement as having memory operands.  */
  s_ann->references_memory = true;

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a VDEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     make this change for non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     GIMPLE_MODIFY_STMT, then we can't suppress the VDEF, lest we run
     into validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_implicit) && unmodifiable_var_p (var))
    flags = (flags & ~opf_def) | opf_use;

  /* The variable is not a GIMPLE register.  Add it (or its aliases) to
     virtual operands, unless the caller has specifically requested
     not to add virtual operands (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  if (MTAG_P (var))
    aliases = MTAG_ALIASES (var);

  if (aliases == NULL)
    {
      if (s_ann && !gimple_aliases_computed_p (cfun))
	s_ann->has_volatile_ops = true;

      /* The variable is not aliased or it is an alias tag.  */
      if (flags & opf_def)
	append_vdef (var);
      else
	append_vuse (var);
    }
  else
    {
      bitmap_iterator bi;
      unsigned int i;
      tree al;

      /* The variable is aliased.  Add its aliases to the virtual
	 operands.  */
      gcc_assert (!bitmap_empty_p (aliases));

      if (flags & opf_def)
	{
	  bool none_added = true;
	  EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
	    {
	      al = referenced_var (i);
	      if (!access_can_touch_variable (full_ref, al, offset, size))
		continue;

	      /* Call-clobbered tags may have non-call-clobbered
		 symbols in their alias sets.  Ignore them if we are
		 adding VOPs for a call site.  */
	      if (is_call_site && !is_call_clobbered (al))
		continue;

	      none_added = false;
	      append_vdef (al);
	    }

	  /* If the variable is also an alias tag, add a virtual
	     operand for it, otherwise we will miss representing
	     references to the members of the variable's alias set.
	     This fixes the bug in gcc.c-torture/execute/20020503-1.c.

	     It is also necessary to add bare defs on clobbers for
	     SMT's, so that bare SMT uses caused by pruning all the
	     aliases will link up properly with calls.  In order to
	     keep the number of these bare defs we add down to the
	     minimum necessary, we keep track of which SMT's were used
	     alone in statement vdefs or VUSEs.  */
	  if (none_added
	      || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
		  && is_call_site))
	    append_vdef (var);
	}
      else
	{
	  bool none_added = true;
	  EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
	    {
	      al = referenced_var (i);
	      if (!access_can_touch_variable (full_ref, al, offset, size))
		continue;

	      /* Call-clobbered tags may have non-call-clobbered
		 symbols in their alias sets.  Ignore them if we are
		 adding VOPs for a call site.  */
	      if (is_call_site && !is_call_clobbered (al))
		continue;

	      none_added = false;
	      append_vuse (al);
	    }

	  /* Even if no aliases have been added, we still need to
	     establish def-use and use-def chains, lest
	     transformations think that this is not a memory
	     reference.  For an example of this scenario, see
	     testsuite/g++.dg/opt/cleanup1.C.  */
	  if (none_added)
	    append_vuse (var);
	}
    }
}
/* Add *VAR_P to the appropriate operand array for S_ANN.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  tree var, sym;
  var_ann_t v_ann;

  gcc_assert (SSA_VAR_P (*var_p) && s_ann);

  var = *var_p;
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  */
  if (TREE_THIS_VOLATILE (sym))
    s_ann->has_volatile_ops = true;

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
}
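
/* Illustrative example, not part of the original file: for a statement
   'i_1 = j_2 + 1' where i and j are GIMPLE registers, this routine
   appends a real DEF for i and a real USE for j; for a store 'g = 1'
   to a non-register global g, it falls through to add_virtual_operand
   and the store becomes a VDEF.  */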
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
      that got us here.

   FLAGS is as in get_expr_operands.

   FULL_REF contains the full pointer dereference expression, if we
      have it, or NULL otherwise.

   OFFSET and SIZE are the location of the access inside the
      dereferenced pointer, if known.

   RECURSE_ON_BASE should be set to true if we want to continue
      calling get_expr_operands on the base pointer, and false if
      something else will do it for us.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags, tree full_ref,
			   HOST_WIDE_INT offset, HOST_WIDE_INT size,
			   bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  s_ann->references_memory = true;
  if (s_ann && TREE_THIS_VOLATILE (expr))
    s_ann->has_volatile_ops = true;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_virtual_operand (pi->name_mem_tag, s_ann, flags,
			       full_ref, offset, size, false);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its symbol memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);

	  if (v_ann->symbol_mem_tag)
	    add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
				 full_ref, offset, size, false);
	  /* Aliasing information is missing; mark statement as
	     volatile so we won't optimize it out too actively.  */
	  else if (s_ann
		   && !gimple_aliases_computed_p (cfun)
		   && (flags & opf_def))
	    s_ann->has_volatile_ops = true;
	}
    }
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      /* If a constant is used as a pointer, we can't generate a real
	 operand for it but we mark the statement volatile to prevent
	 optimizations from messing things up.  */
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }
  else
    {
      /* Ok, this isn't even is_gimple_min_invariant.  Something is broken.  */
      gcc_unreachable ();
    }

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr, opf_use);
}
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag, ref;
  HOST_WIDE_INT offset, size, maxsize;
  subvar_t svars, sv;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* This statement references memory.  */
  s_ann->references_memory = 1;

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);

  if (TMR_SYMBOL (expr))
    add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken);

  tag = TMR_TAG (expr);
  if (!tag)
    {
      /* Something weird, so ensure that we will be careful.  */
      s_ann->has_volatile_ops = true;
      return;
    }

  if (DECL_P (tag))
    {
      get_expr_operands (stmt, &tag, flags);
      return;
    }

  ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
  gcc_assert (ref != NULL_TREE);
  svars = get_subvars_for_var (ref);
  for (sv = svars; sv; sv = sv->next)
    {
      bool exact;

      if (overlap_subvar (offset, maxsize, sv->var, &exact))
	add_stmt_operand (&sv->var, s_ann, flags);
    }
}
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b, not_written_b;

  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
      return;
    }

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */
  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;

  /* Add a VDEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var_lookup (u);
      unsigned int escape_mask = var_ann (var)->escape_mask;
      tree real_var = var;
      bool not_read;
      bool not_written;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b
		 ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
		 : false;

      not_written = not_written_b
		    ? bitmap_bit_p (not_written_b, DECL_UID (real_var))
		    : false;
      gcc_assert (!unmodifiable_var_p (var));

      clobber_stats.clobbered_vars++;

      /* See if this variable is really clobbered by this function.  */

      /* Trivial case: Things escaping only to pure/const are not
	 clobbered by non-pure-const, and only read by pure/const.  */
      if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
	{
	  tree call = get_call_expr_in (stmt);
	  if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
	    {
	      add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	  else
	    {
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	}

      if (not_written)
	{
	  clobber_stats.static_write_clobbers_avoided++;
	  if (!not_read)
	    add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
	  else
	    clobber_stats.static_read_clobbers_avoided++;
	}
      else
	add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
    }
}
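
/* Summary of the pruning above, not part of the original file: for a
   call-clobbered variable V at a call site,

	escapes only to pure/const	-> VUSE at pure/const calls,
					   nothing at other calls;
	not_written && !not_read	-> VUSE instead of VDEF;
	not_written && not_read		-> no virtual operand at all;
	otherwise			-> VDEF.  */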
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
      return;
    }

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var (u);
      tree real_var = var;
      bool not_read;

      clobber_stats.readonly_clobbers++;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
			    : false;

      if (not_read)
	{
	  clobber_stats.static_readonly_clobbers_avoided++;
	  continue;
	}

      add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
    }
}
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  int call_flags = call_expr_flags (expr);
  int i, nargs;
  stmt_ann_t ann = stmt_ann (stmt);

  ann->references_memory = true;

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (gimple_aliases_computed_p (cfun)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt, get_callee_fndecl (expr));
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &CALL_EXPR_FN (expr), opf_use);
  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    get_expr_operands (stmt, &CALL_EXPR_ARG (expr, i), opf_use);

  get_expr_operands (stmt, &CALL_EXPR_STATIC_CHAIN (expr), opf_use);
}
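
/* Illustrative example, not part of the original file: assuming aliases
   have been computed and ECF_NOVOPS is not set,

	x = ordinary_fn (a);	-> add_call_clobber_ops: VDEFs/VUSEs
	y = pure_fn (a);	-> add_call_read_ops: VUSEs only
	z = const_fn (a);	-> no virtual operands at all

   where the _fn names are hypothetical functions carrying the obvious
   attributes.  */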
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann;
  int i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  tree link;

  s_ann = stmt_ann (stmt);
  noutputs = list_length (ASM_OUTPUTS (stmt));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
    }

  /* Gather all input operands.  */
  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	s_ann->references_memory = true;

	EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
	  {
	    tree var = referenced_var (i);
	    add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
	  }

	EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
	  {
	    tree var = referenced_var (i);

	    /* Subvars are explicitly represented in this list, so we
	       don't need the original to be added to the clobber ops,
	       but the original *will* be in this list because we keep
	       the addressability of the original variable up-to-date
	       to avoid confusing the back-end.  */
	    if (var_can_have_subvars (var)
		&& get_subvars_for_var (var) != NULL)
	      continue;

	    add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
	  }
	break;
      }
}
/* Scan operands for the assignment expression EXPR in statement STMT.  */

static void
get_modify_stmt_operands (tree stmt, tree expr)
{
  /* First get operands from the RHS.  */
  get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_use);

  /* For the LHS, use a regular definition (opf_def) for GIMPLE
     registers.  If the LHS is a store to memory, we will need
     a preserving definition (VDEF).

     Preserving definitions are those that modify a part of an
     aggregate object for which no subvars have been computed (or the
     reference does not correspond exactly to one of them).  Stores
     through a pointer are also represented with VDEF operators.

     We used to distinguish between preserving and killing definitions.
     We always emit preserving definitions now.  */
  get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0), opf_def);
}
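
/* Illustrative example, not part of the original file: for a GIMPLE
   assignment 'a.x = b_1 + 1' where 'a' lives in memory, the RHS scan
   above yields a real USE of b_1, while the LHS scan yields a
   preserving definition of 'a':

	# a_3 = VDEF <a_2>
	a.x = b_1 + 1;
*/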
/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);

      /* If the address is invariant, there may be no interesting
	 variable references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case STRUCT_FIELD_TAG:
    case SYMBOL_MEMORY_TAG:
    case NAME_MEMORY_TAG:
      add_stmt_operand (expr_p, s_ann, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      {
	subvar_t svars, sv;

	/* Add the subvars for a variable, if it has subvars, to DEFS
	   or USES.  Otherwise, add the variable itself.  Whether it
	   goes to USES or DEFS depends on the operand flags.  */
	if (var_can_have_subvars (expr)
	    && (svars = get_subvars_for_var (expr)))
	  {
	    for (sv = svars; sv; sv = sv->next)
	      add_stmt_operand (&sv->var, s_ann, flags);
	  }
	else
	  add_stmt_operand (expr_p, s_ann, flags);

	return;
      }

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* Fall through.  */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree ref;
	HOST_WIDE_INT offset, size, maxsize;
	subvar_t sv;

	/* This component reference becomes an access to all of the
	   subvariables it can touch, if we can determine that, but
	   *NOT* the real one.  If we can't determine which fields we
	   could touch, the recursion will eventually get to a
	   variable and add *all* of its subvars, or whatever is the
	   minimum correct subset.  */
	ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
	if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
	  {
	    subvar_t svars = get_subvars_for_var (ref);

	    for (sv = svars; sv; sv = sv->next)
	      {
		bool exact;

		if (overlap_subvar (offset, maxsize, sv->var, &exact))
		  {
		    int subvar_flags = flags;
		    add_stmt_operand (&sv->var, s_ann, subvar_flags);
		  }
	      }

	    flags |= opf_no_vops;
	  }
	else if (TREE_CODE (ref) == INDIRECT_REF)
	  {
	    get_indirect_ref_operands (stmt, ref, flags, expr, offset,
				       maxsize, false);
	    flags |= opf_no_vops;
	  }

	/* Even if we found subvars above we need to ensure to see
	   immediate uses for d in s.a[d].  In case of s.a having
	   a subvar or we would miss it otherwise.  */
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      s_ann->has_volatile_ops = true;
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use);
	  }

	return;
      }


    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
         and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
      return;

    case GIMPLE_MODIFY_STMT:
      get_modify_stmt_operands (stmt, expr);
      return;

    case CONSTRUCTOR:
      {
        /* General aggregate CONSTRUCTORs have been decomposed, but they
           are still in use as the COMPLEX_EXPR equivalent for vectors.  */
        constructor_elt *ce;
        unsigned HOST_WIDE_INT idx;

        for (idx = 0;
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
             idx++)
          get_expr_operands (stmt, &ce->value, opf_use);

        return;
      }

    case BIT_FIELD_REF:
    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
        return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
    case CONST_DECL:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
        goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
        goto do_binary;
      if (class == tcc_constant || class == tcc_type)
        return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
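
/* An illustrative sketch (not part of the original sources) of what the
   scan above collects.  For a statement such as

        s.f = *p_1 + x_2;

   the LHS is scanned with opf_def and the RHS with opf_use, giving
   roughly:

        USE  : p_1, x_2                 (real operands)
        VUSE : the memory symbols '*p_1' may read
        VDEF : the memory symbols the store to 's.f' may write

   The exact virtual operands depend on the results of alias analysis
   and on whether 's' has sub-variables (structure field tags).  */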

/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */
static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case GIMPLE_MODIFY_STMT:
      get_modify_stmt_operands (stmt, stmt);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_use);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_use);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_use);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_use);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_use);
      break;

    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      /* These nodes contain no variable references.  */
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the
         operand pointer (which may only happen for USE operands), we
         will fail in add_stmt_operand.  This default will handle
         statements like empty statements, or CALL_EXPRs that may
         appear on the RHS of a statement or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_use);
      break;
    }
}
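
/* Illustrative note (not from the original sources): for a statement
   'if (a_1 > b_2)' the COND_EXPR case above scans only the predicate
   with opf_use, yielding USE operands for a_1 and b_2; the branch
   targets contribute no operands of their own.  */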

/* Create an operands cache for STMT.  */

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands and
     makes no memory references.  */
  ann->has_volatile_ops = false;
  ann->references_memory = false;

  /* Just clear the bitmap so we don't end up reallocating it over and over.  */
  if (ann->addresses_taken)
    bitmap_clear (ann->addresses_taken);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  operand_build_sort_virtual (build_vuses);
  operand_build_sort_virtual (build_vdefs);
  finalize_ssa_stmt_operands (stmt);

  if (ann->addresses_taken && bitmap_empty_p (ann->addresses_taken))
    ann->addresses_taken = NULL;

  /* For added safety, assume that statements with volatile operands
     also reference memory.  */
  if (ann->has_volatile_ops)
    ann->references_memory = true;
}

/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->vdef_ops = NULL;
  ops->vuse_ops = NULL;
  BITMAP_FREE (ops->loads);
  BITMAP_FREE (ops->stores);
}

/* Get the operands of statement STMT.  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  timevar_push (TV_TREE_OPS);

  gcc_assert (ann->modified);
  build_ssa_operands (stmt);
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
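
/* A minimal usage sketch (illustrative, not part of the original file):
   a pass that edits a statement in place lets the operand cache catch
   up via update_stmt, which marks the statement modified and funnels
   into the routine above.  'new_rhs' is a placeholder for whatever
   tree the pass built:

        GIMPLE_STMT_OPERAND (stmt, 1) = new_rhs;
        update_stmt (stmt);

   after which the DEF/USE/VDEF/VUSE iterators see the new operands.  */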

/* Copies virtual operands from SRC to DEST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  unsigned int i, n;
  voptype_p src_vuses, dest_vuses;
  voptype_p src_vdefs, dest_vdefs;
  struct voptype_d vuse;
  struct voptype_d vdef;
  stmt_ann_t dest_ann;

  VDEF_OPS (dest) = NULL;
  VUSE_OPS (dest) = NULL;

  dest_ann = get_stmt_ann (dest);
  BITMAP_FREE (dest_ann->operands.loads);
  BITMAP_FREE (dest_ann->operands.stores);

  if (LOADED_SYMS (src))
    {
      dest_ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);
      bitmap_copy (dest_ann->operands.loads, LOADED_SYMS (src));
    }

  if (STORED_SYMS (src))
    {
      dest_ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);
      bitmap_copy (dest_ann->operands.stores, STORED_SYMS (src));
    }

  /* Copy all the VUSE operators and corresponding operands.  */
  dest_vuses = &vuse;
  for (src_vuses = VUSE_OPS (src); src_vuses; src_vuses = src_vuses->next)
    {
      n = VUSE_NUM (src_vuses);
      dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses);
      for (i = 0; i < n; i++)
        SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));

      if (VUSE_OPS (dest) == NULL)
        VUSE_OPS (dest) = vuse.next;
    }

  /* Copy all the VDEF operators and corresponding operands.  */
  dest_vdefs = &vdef;
  for (src_vdefs = VDEF_OPS (src); src_vdefs; src_vdefs = src_vdefs->next)
    {
      n = VUSE_NUM (src_vdefs);
      dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs);
      VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs);
      for (i = 0; i < n; i++)
        SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));

      if (VDEF_OPS (dest) == NULL)
        VDEF_OPS (dest) = vdef.next;
    }
}

/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store, this can
   be used to eliminate redundant loads.  OLD_STMT is the store stmt, and
   NEW_STMT is the new load which represents a load of the values stored.  */

void
create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt)
{
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned i;

  get_stmt_ann (new_stmt);

  /* Process NEW_STMT looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
    if (TREE_CODE (op) != SSA_NAME)
      var_ann (op)->in_vuse_list = false;

  for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
    if (TREE_CODE (op) != SSA_NAME)
      var_ann (op)->in_vdef_list = false;

  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_vdefs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the VDEF result operand on the new statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
    append_vuse (op);

  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}
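
/* Illustrative example (not from the original sources): for a store

        # a_6 = VDEF <a_5>
        a = x_3;

   the artificial load built here carries 'VUSE <a_6>', making it look
   like a read of the value just stored; DOM can then treat a later
   load of 'a' reached by the same VDEF as redundant with x_3.  */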

/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to check the validity of the swap operation.  */
void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp0)
          {
            use0 = ptr;
            break;
          }

      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp1)
          {
            use1 = ptr;
            break;
          }

      /* If we do not have operand entries for both uses, there isn't
         much we can do at this point.  Presumably we don't need to
         worry about it.  */
      if (use0 && use1)
        {
          tree *tmp = USE_OP_PTR (use1)->use;
          USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
          USE_OP_PTR (use0)->use = tmp;
        }
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}

/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
   a single variable whose address has been taken or any other valid
   GIMPLE memory reference (structure reference, array, etc).  If the
   base address of REF is a decl that has sub-variables, also add all
   of its sub-variables.  */
void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
  tree var;
  subvar_t svars;

  gcc_assert (addresses_taken);

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*addresses_taken == NULL)
        *addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
          && (svars = get_subvars_for_var (var)))
        {
          subvar_t sv;
          for (sv = svars; sv; sv = sv->next)
            {
              bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
              TREE_ADDRESSABLE (sv->var) = 1;
            }
        }
      else
        {
          bitmap_set_bit (*addresses_taken, DECL_UID (var));
          TREE_ADDRESSABLE (var) = 1;
        }
    }
}
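
/* Illustrative example (not from the original sources): given

        struct { int a; int b; } s;
        int *p = &s.a;

   it is not enough to mark 's.a' addressable; pointer arithmetic on
   'p' may reach 's.b' as well (see PR 21407), which is why the whole
   base 's', or all of its sub-variables, is marked above.  */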

/* Scan the immediate_use list for VAR, making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */
static bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
         problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}

/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
        print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS|TDF_MEMSYMS);
      else
        print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }
  fprintf (file, "\n");
}

/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}

/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}

/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

/* Create a new change buffer for the statement pointed by STMT_P and
   push the buffer into SCB_STACK.  Each change buffer
   records state information needed to determine what changed in the
   statement.  Mainly, this keeps track of symbols that may need to be
   put into SSA form, SSA name replacements and other information
   needed to keep the SSA form up to date.  */
void
push_stmt_changes (tree *stmt_p)
{
  tree stmt;
  scb_t buf;

  stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  buf = xmalloc (sizeof *buf);
  memset (buf, 0, sizeof *buf);

  buf->stmt_p = stmt_p;

  if (stmt_references_memory_p (stmt))
    {
      tree op;
      ssa_op_iter i;

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
        {
          tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
          if (buf->loads == NULL)
            buf->loads = BITMAP_ALLOC (NULL);
          bitmap_set_bit (buf->loads, DECL_UID (sym));
        }

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
        {
          tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
          if (buf->stores == NULL)
            buf->stores = BITMAP_ALLOC (NULL);
          bitmap_set_bit (buf->stores, DECL_UID (sym));
        }
    }

  VEC_safe_push (scb_t, heap, scb_stack, buf);
}
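
/* A minimal usage sketch (illustrative, not part of the original file):
   a pass wraps each in-place statement modification in a push/pop pair
   so that any change in the referenced memory symbols is renamed:

        push_stmt_changes (&stmt);
        ... modify stmt in place ...
        if (changed)
          pop_stmt_changes (&stmt);      <-- re-scan, mark for renaming
        else
          discard_stmt_changes (&stmt);  <-- cheap, no re-scan
*/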

/* Given two sets S1 and S2, mark the symbols that differ in S1 and S2
   for renaming.  The set to mark for renaming is (S1 & ~S2) | (S2 & ~S1).  */
static void
mark_difference_for_renaming (bitmap s1, bitmap s2)
{
  if (s1 == NULL && s2 == NULL)
    return;

  if (s1 && s2 == NULL)
    mark_set_for_renaming (s1);
  else if (s1 == NULL && s2)
    mark_set_for_renaming (s2);
  else if (!bitmap_equal_p (s1, s2))
    {
      bitmap t1 = BITMAP_ALLOC (NULL);
      bitmap t2 = BITMAP_ALLOC (NULL);

      bitmap_and_compl (t1, s1, s2);
      bitmap_and_compl (t2, s2, s1);
      bitmap_ior_into (t1, t2);
      mark_set_for_renaming (t1);

      BITMAP_FREE (t1);
      BITMAP_FREE (t2);
    }
}
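
/* Worked example (illustrative): if S1 = {a, b} and S2 = {b, c}, then
   (S1 & ~S2) = {a} and (S2 & ~S1) = {c}, so {a, c} is marked for
   renaming; the common symbol 'b' is untouched.  */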

/* Pop the top SCB from SCB_STACK and act on the differences between
   what was recorded by push_stmt_changes and the current state of
   the statement.  */
void
pop_stmt_changes (tree *stmt_p)
{
  tree op, stmt;
  ssa_op_iter iter;
  bitmap loads, stores;
  scb_t buf;

  stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  buf = VEC_pop (scb_t, scb_stack);
  gcc_assert (stmt_p == buf->stmt_p);

  /* Force an operand re-scan on the statement and mark any newly
     exposed variables.  */
  update_stmt (stmt);

  /* Determine whether any memory symbols need to be renamed.  If the
     sets of loads and stores are different after the statement is
     modified, then the affected symbols need to be renamed.

     Note that it may be possible for the statement to not reference
     memory anymore, but we still need to act on the differences in
     the sets of symbols.  */
  loads = stores = NULL;
  if (stmt_references_memory_p (stmt))
    {
      tree op;
      ssa_op_iter i;

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
        {
          tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
          if (loads == NULL)
            loads = BITMAP_ALLOC (NULL);
          bitmap_set_bit (loads, DECL_UID (sym));
        }

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
        {
          tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
          if (stores == NULL)
            stores = BITMAP_ALLOC (NULL);
          bitmap_set_bit (stores, DECL_UID (sym));
        }
    }

  /* If LOADS is different from BUF->LOADS, the affected
     symbols need to be marked for renaming.  */
  mark_difference_for_renaming (loads, buf->loads);

  /* Similarly for STORES and BUF->STORES.  */
  mark_difference_for_renaming (stores, buf->stores);

  /* Mark all the naked GIMPLE register operands for renaming.  */
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
    if (DECL_P (op))
      mark_sym_for_renaming (op);

  /* FIXME, need to add more finalizers here.  Cleanup EH info,
     recompute invariants for address expressions, add
     SSA replacement mappings, etc.  For instance, given
     testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of
     the form

        # SMT.4_20 = VDEF <SMT.4_16>
        ...

     So, the VDEF will disappear, but instead of marking SMT.4 for
     renaming it would be far more efficient to establish a
     replacement mapping that would replace every reference of
     SMT.4_20 with SMT.4_16.  */

  /* Free memory used by the buffer.  */
  BITMAP_FREE (buf->loads);
  BITMAP_FREE (buf->stores);
  BITMAP_FREE (loads);
  BITMAP_FREE (stores);
  buf->stmt_p = NULL;
  free (buf);
}

/* Discard the topmost change buffer from SCB_STACK.  This is useful
   when the caller realizes that it did not actually modify the
   statement.  It avoids the expensive operand re-scan.  */
void
discard_stmt_changes (tree *stmt_p)
{
  scb_t buf;
  tree stmt;

  /* It makes no sense to keep track of PHI nodes.  */
  stmt = *stmt_p;
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  buf = VEC_pop (scb_t, scb_stack);
  gcc_assert (stmt_p == buf->stmt_p);

  /* Free memory used by the buffer.  */
  BITMAP_FREE (buf->loads);
  BITMAP_FREE (buf->stores);
  buf->stmt_p = NULL;
  free (buf);
}

/* Returns true if statement STMT may access memory.  */

bool
stmt_references_memory_p (tree stmt)
{
  if (!gimple_ssa_operands (cfun)->ops_active || TREE_CODE (stmt) == PHI_NODE)
    return false;

  return stmt_ann (stmt)->references_memory;
}

/* Return the memory partition tag (MPT) associated with memory
   symbol SYM.  From a correctness standpoint, memory partitions can
   be assigned in any arbitrary fashion as long as this rule is
   observed: Given two memory partitions MPT.i and MPT.j, they must
   not contain symbols in common.

   Memory partitions are used when putting the program into Memory-SSA
   form.  In particular, in Memory-SSA PHI nodes are not computed for
   individual memory symbols.  They are computed for memory
   partitions.  This reduces the amount of PHI nodes in the SSA graph
   at the expense of precision (i.e., it makes unrelated stores affect
   each other).

   However, it is possible to increase precision by changing this
   partitioning scheme.  For instance, if the partitioning scheme is
   such that get_mpt_for is the identity function (that is,
   get_mpt_for (s) = s), this will result in ultimate precision at the
   expense of huge SSA webs.

   At the other extreme, a partitioning scheme that groups all the
   symbols in the same set results in minimal SSA webs and almost
   total loss of precision.  */
static tree
get_mpt_for (tree sym)
{
  tree mpt;

  /* Don't create a new tag unnecessarily.  */
  mpt = memory_partition (sym);
  if (mpt == NULL_TREE)
    {
      mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
      TREE_ADDRESSABLE (mpt) = 0;
      MTAG_GLOBAL (mpt) = 1;
      add_referenced_var (mpt);
      VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
      MPT_SYMBOLS (mpt) = BITMAP_ALLOC (&operands_bitmap_obstack);
      set_memory_partition (sym, mpt);
    }

  return mpt;
}
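
/* Illustrative example (not from the original sources): with symbols
   {a, b, c, d} partitioned as MPT.1 = {a, b} and MPT.2 = {c, d}, a
   store to 'a' appears as a VDEF of MPT.1 and therefore conflicts
   with loads of 'b' but not with loads of 'c' or 'd'.  Finer
   partitions give more precision; coarser ones give smaller SSA
   webs.  */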

/* Dump memory partition information to FILE.  */

static void
dump_memory_partitions (FILE *file)
{
  unsigned i, npart;
  unsigned long nsyms;
  tree mpt;

  fprintf (file, "\nMemory partitions\n\n");
  for (i = 0, npart = 0, nsyms = 0;
       VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
       i++)
    {
      if (mpt)
        {
          bitmap syms = MPT_SYMBOLS (mpt);
          unsigned long n = bitmap_count_bits (syms);

          fprintf (file, "#%u: ", i);
          print_generic_expr (file, mpt, 0);
          fprintf (file, ": %lu elements: ", n);
          dump_decl_set (file, syms);
          npart++;
          nsyms += n;
        }
    }

  fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
}

/* Dump memory partition information to stderr.  */

void
debug_memory_partitions (void)
{
  dump_memory_partitions (stderr);
}