/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "ipa-reference.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
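/* A minimal sketch of the cycle described above, as wired up by
   build_ssa_operands() further below in this file:

     start_ssa_stmt_operands ();             the build_* vectors must be empty
     parse_ssa_operands (stmt);              fill build_defs/uses/vdefs/vuses
     finalize_ssa_stmt_operands (stmt);      match against any old cache, emit new

   Statements modified by a pass reach this cycle through
   update_stmt_operands(), which calls build_ssa_operands().  */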
/* Helper functions from gimple.c.  These are GIMPLE manipulation
   routines that only the operand scanner should need.  */
void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);
/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use		0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def		(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit	(1 << 2)
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Set for building all the VDEF operands.  */
static VEC(tree,heap) *build_vdefs;

/* Set for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;

/* Bitmap obstack for our datastructures that needs to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

/* Set for building all the loaded symbols.  */
static bitmap build_loads;

/* Set for building all the stored symbols.  */
static bitmap build_stores;
static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;
/* Statement change buffer.  Data structure used to record state
   information for statements.  This is used to determine what needs
   to be done in order to update the SSA web after a statement is
   modified by a pass.  If STMT is a statement that has just been
   created, or needs to be folded via fold_stmt, or anything that
   changes its physical structure then the pass should:

   1- Call push_stmt_changes (&stmt) to record the current state of
      STMT before any modifications are made.

   2- Make all appropriate modifications to the statement.

   3- Call pop_stmt_changes (&stmt) to find new symbols that
      need to be put in SSA form, SSA name mappings for names that
      have disappeared, recompute invariantness for address
      expressions, cleanup EH information, etc.

   If it is possible to determine that the statement was not modified,
   instead of calling pop_stmt_changes it is quicker to call
   discard_stmt_changes to avoid the expensive and unnecessary operand
   re-scan and change comparison.  */
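/* A minimal usage sketch of the protocol above (illustrative only; the
   modification step in the middle stands for whatever the pass does to
   the statement, e.g. fold_stmt):

     push_stmt_changes (&stmt);
     ... modify stmt ...
     pop_stmt_changes (&stmt);

   or, when the pass can determine that STMT was in fact not modified:

     push_stmt_changes (&stmt);
     ...
     discard_stmt_changes (&stmt);  */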
struct scb_d
{
  /* Pointer to the statement being modified.  */
  gimple *stmt_p;

  /* If the statement references memory these are the sets of symbols
     loaded and stored by the statement.  */
  bitmap loads;
  bitmap stores;
};

typedef struct scb_d *scb_t;
DEF_VEC_P(scb_t);
DEF_VEC_ALLOC_P(scb_t,heap);
/* Stack of statement change buffers (SCB).  Every call to
   push_stmt_changes pushes a new buffer onto the stack.  Calls to
   pop_stmt_changes pop a buffer off of the stack and compute the set
   of changes for the popped statement.  */
static VEC(scb_t,heap) *scb_stack;
/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (const_tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}
/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  const_tree const e1 = *((const_tree const *) p);
  const_tree const e2 = *((const_tree const *) q);
  const unsigned int u1 = get_name_decl (e1);
  const unsigned int u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif

  return (u1 > u2 ? 1 : -1);
}
/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);

  if (num < 2)
    return;

  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
	  > get_name_decl (VEC_index (tree, list, 1)))
	{
	  /* Swap elements if in the wrong order.  */
	  tree tmp = VEC_index (tree, list, 0);
	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
	  VEC_replace (tree, list, 1, tmp);
	}
      return;
    }

  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
	 VEC_length (tree, list),
	 sizeof (tree),
	 operand_build_cmp);
}
/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  /* This function may be invoked from contexts where CFUN is NULL
     (IPA passes), return false for now.  FIXME: operands may be
     active in each individual function, maybe this function should
     take CFUN as a parameter.  */
  if (cfun == NULL)
    return false;

  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}
/* VOPs are of variable size, so the free list maps "free buckets" to the
   following table:

	bucket		# operands
	------------------------------
	0		1
	1		2
	...
	15		16
	16		17-24
	17		25-32
	...

   Any VOPs larger than this are simply added to the largest bucket when
   they are freed.  */

/* Return the number of operands used in bucket BUCKET.  */

static inline int
vop_free_bucket_size (int bucket)
{
#ifdef ENABLE_CHECKING
  gcc_assert (bucket >= 0 && bucket < NUM_VOP_FREE_BUCKETS);
#endif
  if (bucket < 16)
    return bucket + 1;
  return (bucket - 13) * 8;
}
/* For a vop of NUM operands, return the bucket NUM belongs to.  If NUM is
   beyond the end of the bucket table, return -1.  */

static inline int
vop_free_bucket_index (int num)
{
  gcc_assert (num > 0 && NUM_VOP_FREE_BUCKETS > 16);

  /* Sizes 1 through 16 use buckets 0-15.  */
  if (num <= 16)
    return num - 1;
  /* Buckets 16 - NUM_VOP_FREE_BUCKETS represent 8 unit chunks.  */
  num = 14 + (num - 1) / 8;
  if (num >= NUM_VOP_FREE_BUCKETS)
    return -1;
  return num;
}
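/* Worked example of the bucket arithmetic above (illustrative): a vop
   with num = 20 operands maps to bucket 14 + (20 - 1) / 8 = 16, and
   vop_free_bucket_size (16) = (16 - 13) * 8 = 24, i.e. bucket 16 holds
   freed vops with 17 to 24 operand slots.  */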
/* Initialize the VOP free buckets.  */

static inline void
init_vop_buckets (void)
{
  int x;

  for (x = 0; x < NUM_VOP_FREE_BUCKETS; x++)
    gimple_ssa_operands (cfun)->vop_free_buckets[x] = NULL;
}
/* Add PTR to the appropriate VOP bucket.  */

static inline void
add_vop_to_freelist (voptype_p ptr)
{
  int bucket = vop_free_bucket_index (VUSE_VECT_NUM_ELEM (ptr->usev));

  /* Too large, use the largest bucket so it's not a complete throw away.  */
  if (bucket == -1)
    bucket = NUM_VOP_FREE_BUCKETS - 1;

  ptr->next = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
  gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = ptr;
}
/* These are the sizes of the operand memory buffer which gets allocated each
   time more operand space is required.  The final value is the amount that is
   allocated every time after that.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_2	110
#define OP_SIZE_3	511
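/* Sketch of the resulting growth schedule (see ssa_operand_alloc below):
   the first buffer holds OP_SIZE_1 * sizeof (struct voptype_d) bytes, the
   second OP_SIZE_2 * sizeof (struct voptype_d), and every later buffer
   OP_SIZE_3 * sizeof (struct voptype_d).  A single request larger than
   even the maximum size gets a buffer of exactly its own size
   (see PR34093).  */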
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuses = VEC_alloc (tree, heap, 25);
      build_vdefs = VEC_alloc (tree, heap, 25);
      bitmap_obstack_initialize (&operands_bitmap_obstack);
      build_loads = BITMAP_ALLOC (&operands_bitmap_obstack);
      build_stores = BITMAP_ALLOC (&operands_bitmap_obstack);
      scb_stack = VEC_alloc (scb_t, heap, 20);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index
     = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  init_vop_buckets ();
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
}
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  unsigned ix;
  tree mpt;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      VEC_free (tree, heap, build_vdefs);
      VEC_free (tree, heap, build_vuses);
      BITMAP_FREE (build_loads);
      BITMAP_FREE (build_stores);

      /* The change buffer stack had better be empty.  */
      gcc_assert (VEC_length (scb_t, scb_stack) == 0);
      VEC_free (scb_t, heap, scb_stack);
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
	= gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  for (ix = 0;
       VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt);
       ix++)
    if (mpt)
      BITMAP_FREE (MPT_SYMBOLS (mpt));

  VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table);

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars: %d\n",
	       clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided: %d\n",
	       clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided: %d\n",
	       clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
	       clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers: %d\n",
	       clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
	       clobber_stats.static_readonly_clobbers_avoided);
    }
}
463 /* Return memory for operands of SIZE chunks. */
466 ssa_operand_alloc (unsigned size
)
470 if (gimple_ssa_operands (cfun
)->operand_memory_index
+ size
471 >= gimple_ssa_operands (cfun
)->ssa_operand_mem_size
)
473 struct ssa_operand_memory_d
*ptr
;
475 if (gimple_ssa_operands (cfun
)->ssa_operand_mem_size
== OP_SIZE_INIT
)
476 gimple_ssa_operands (cfun
)->ssa_operand_mem_size
477 = OP_SIZE_1
* sizeof (struct voptype_d
);
479 if (gimple_ssa_operands (cfun
)->ssa_operand_mem_size
480 == OP_SIZE_1
* sizeof (struct voptype_d
))
481 gimple_ssa_operands (cfun
)->ssa_operand_mem_size
482 = OP_SIZE_2
* sizeof (struct voptype_d
);
484 gimple_ssa_operands (cfun
)->ssa_operand_mem_size
485 = OP_SIZE_3
* sizeof (struct voptype_d
);
487 /* Go right to the maximum size if the request is too large. */
488 if (size
> gimple_ssa_operands (cfun
)->ssa_operand_mem_size
)
489 gimple_ssa_operands (cfun
)->ssa_operand_mem_size
490 = OP_SIZE_3
* sizeof (struct voptype_d
);
      /* We can reliably trigger the case that we need arbitrarily many
	 operands (see PR34093), so allocate a buffer just for this request.  */
494 if (size
> gimple_ssa_operands (cfun
)->ssa_operand_mem_size
)
495 gimple_ssa_operands (cfun
)->ssa_operand_mem_size
= size
;
497 ptr
= (struct ssa_operand_memory_d
*)
498 ggc_alloc (sizeof (struct ssa_operand_memory_d
)
499 + gimple_ssa_operands (cfun
)->ssa_operand_mem_size
- 1);
500 ptr
->next
= gimple_ssa_operands (cfun
)->operand_memory
;
501 gimple_ssa_operands (cfun
)->operand_memory
= ptr
;
502 gimple_ssa_operands (cfun
)->operand_memory_index
= 0;
504 ptr
= &(gimple_ssa_operands (cfun
)->operand_memory
505 ->mem
[gimple_ssa_operands (cfun
)->operand_memory_index
]);
506 gimple_ssa_operands (cfun
)->operand_memory_index
+= size
;
511 /* Allocate a DEF operand. */
513 static inline struct def_optype_d
*
516 struct def_optype_d
*ret
;
517 if (gimple_ssa_operands (cfun
)->free_defs
)
519 ret
= gimple_ssa_operands (cfun
)->free_defs
;
520 gimple_ssa_operands (cfun
)->free_defs
521 = gimple_ssa_operands (cfun
)->free_defs
->next
;
524 ret
= (struct def_optype_d
*)
525 ssa_operand_alloc (sizeof (struct def_optype_d
));
530 /* Allocate a USE operand. */
532 static inline struct use_optype_d
*
535 struct use_optype_d
*ret
;
536 if (gimple_ssa_operands (cfun
)->free_uses
)
538 ret
= gimple_ssa_operands (cfun
)->free_uses
;
539 gimple_ssa_operands (cfun
)->free_uses
540 = gimple_ssa_operands (cfun
)->free_uses
->next
;
543 ret
= (struct use_optype_d
*)
544 ssa_operand_alloc (sizeof (struct use_optype_d
));
549 /* Allocate a vop with NUM elements. */
551 static inline struct voptype_d
*
554 struct voptype_d
*ret
= NULL
;
557 int bucket
= vop_free_bucket_index (num
);
560 /* If there is a free operand, use it. */
561 if (gimple_ssa_operands (cfun
)->vop_free_buckets
[bucket
] != NULL
)
563 ret
= gimple_ssa_operands (cfun
)->vop_free_buckets
[bucket
];
564 gimple_ssa_operands (cfun
)->vop_free_buckets
[bucket
] =
565 gimple_ssa_operands (cfun
)->vop_free_buckets
[bucket
]->next
;
568 alloc_size
= vop_free_bucket_size(bucket
);
574 ret
= (struct voptype_d
*)ssa_operand_alloc (
575 sizeof (struct voptype_d
) + (alloc_size
- 1) * sizeof (vuse_element_t
));
577 VUSE_VECT_NUM_ELEM (ret
->usev
) = num
;
582 /* This routine makes sure that PTR is in an immediate use list, and makes
583 sure the stmt pointer is set to the current stmt. */
586 set_virtual_use_link (use_operand_p ptr
, gimple stmt
)
588 /* fold_stmt may have changed the stmt pointers. */
589 if (ptr
->loc
.stmt
!= stmt
)
590 ptr
->loc
.stmt
= stmt
;
592 /* If this use isn't in a list, add it to the correct list. */
594 link_imm_use (ptr
, *(ptr
->use
));
598 /* Adds OP to the list of defs after LAST. */
600 static inline def_optype_p
601 add_def_op (tree
*op
, def_optype_p last
)
603 def_optype_p new_def
;
605 new_def
= alloc_def ();
606 DEF_OP_PTR (new_def
) = op
;
607 last
->next
= new_def
;
608 new_def
->next
= NULL
;
613 /* Adds OP to the list of uses of statement STMT after LAST. */
615 static inline use_optype_p
616 add_use_op (gimple stmt
, tree
*op
, use_optype_p last
)
618 use_optype_p new_use
;
620 new_use
= alloc_use ();
621 USE_OP_PTR (new_use
)->use
= op
;
622 link_imm_use_stmt (USE_OP_PTR (new_use
), *op
, stmt
);
623 last
->next
= new_use
;
624 new_use
->next
= NULL
;
629 /* Return a virtual op pointer with NUM elements which are all
630 initialized to OP and are linked into the immediate uses for STMT.
631 The new vop is appended after PREV. */
633 static inline voptype_p
634 add_vop (gimple stmt
, tree op
, int num
, voptype_p prev
)
639 new_vop
= alloc_vop (num
);
640 for (x
= 0; x
< num
; x
++)
642 VUSE_OP_PTR (new_vop
, x
)->prev
= NULL
;
643 SET_VUSE_OP (new_vop
, x
, op
);
644 VUSE_OP_PTR (new_vop
, x
)->use
= &new_vop
->usev
.uses
[x
].use_var
;
645 link_imm_use_stmt (VUSE_OP_PTR (new_vop
, x
),
646 new_vop
->usev
.uses
[x
].use_var
, stmt
);
650 prev
->next
= new_vop
;
651 new_vop
->next
= NULL
;
656 /* Adds OP to the list of vuses of statement STMT after LAST, and moves
657 LAST to the new element. */
659 static inline voptype_p
660 add_vuse_op (gimple stmt
, tree op
, int num
, voptype_p last
)
662 voptype_p new_vop
= add_vop (stmt
, op
, num
, last
);
663 VDEF_RESULT (new_vop
) = NULL_TREE
;
668 /* Adds OP to the list of vdefs of statement STMT after LAST, and moves
669 LAST to the new element. */
671 static inline voptype_p
672 add_vdef_op (gimple stmt
, tree op
, int num
, voptype_p last
)
674 voptype_p new_vop
= add_vop (stmt
, op
, num
, last
);
675 VDEF_RESULT (new_vop
) = op
;
680 /* Takes elements from build_defs and turns them into def operands of STMT.
681 TODO -- Make build_defs VEC of tree *. */
684 finalize_ssa_defs (gimple stmt
)
687 struct def_optype_d new_list
;
688 def_optype_p old_ops
, last
;
689 unsigned int num
= VEC_length (tree
, build_defs
);
691 /* There should only be a single real definition per assignment. */
692 gcc_assert ((stmt
&& gimple_code (stmt
) != GIMPLE_ASSIGN
) || num
<= 1);
694 new_list
.next
= NULL
;
697 old_ops
= gimple_def_ops (stmt
);
701 /* Check for the common case of 1 def that hasn't changed. */
702 if (old_ops
&& old_ops
->next
== NULL
&& num
== 1
703 && (tree
*) VEC_index (tree
, build_defs
, 0) == DEF_OP_PTR (old_ops
))
706 /* If there is anything in the old list, free it. */
709 old_ops
->next
= gimple_ssa_operands (cfun
)->free_defs
;
710 gimple_ssa_operands (cfun
)->free_defs
= old_ops
;
713 /* If there is anything remaining in the build_defs list, simply emit it. */
714 for ( ; new_i
< num
; new_i
++)
715 last
= add_def_op ((tree
*) VEC_index (tree
, build_defs
, new_i
), last
);
717 /* Now set the stmt's operands. */
718 gimple_set_def_ops (stmt
, new_list
.next
);
720 #ifdef ENABLE_CHECKING
724 for (ptr
= gimple_def_ops (stmt
); ptr
; ptr
= ptr
->next
)
727 gcc_assert (x
== num
);
733 /* Takes elements from build_uses and turns them into use operands of STMT.
734 TODO -- Make build_uses VEC of tree *. */
737 finalize_ssa_uses (gimple stmt
)
740 struct use_optype_d new_list
;
741 use_optype_p old_ops
, ptr
, last
;
743 new_list
.next
= NULL
;
746 old_ops
= gimple_use_ops (stmt
);
748 /* If there is anything in the old list, free it. */
751 for (ptr
= old_ops
; ptr
; ptr
= ptr
->next
)
752 delink_imm_use (USE_OP_PTR (ptr
));
753 old_ops
->next
= gimple_ssa_operands (cfun
)->free_uses
;
754 gimple_ssa_operands (cfun
)->free_uses
= old_ops
;
757 /* Now create nodes for all the new nodes. */
758 for (new_i
= 0; new_i
< VEC_length (tree
, build_uses
); new_i
++)
759 last
= add_use_op (stmt
,
760 (tree
*) VEC_index (tree
, build_uses
, new_i
),
763 /* Now set the stmt's operands. */
764 gimple_set_use_ops (stmt
, new_list
.next
);
766 #ifdef ENABLE_CHECKING
769 for (ptr
= gimple_use_ops (stmt
); ptr
; ptr
= ptr
->next
)
772 gcc_assert (x
== VEC_length (tree
, build_uses
));
778 /* Takes elements from BUILD_VDEFS and turns them into vdef operands of
782 finalize_ssa_vdefs (gimple stmt
)
785 struct voptype_d new_list
;
786 voptype_p old_ops
, ptr
, last
;
788 /* Set the symbols referenced by STMT. */
789 gimple_set_stored_syms (stmt
, build_stores
, &operands_bitmap_obstack
);
791 /* If aliases have not been computed, do not instantiate a virtual
792 operator on STMT. Initially, we only compute the SSA form on
793 GIMPLE registers. The virtual SSA form is only computed after
794 alias analysis, so virtual operators will remain unrenamed and
795 the verifier will complain. However, alias analysis needs to
796 access symbol load/store information, so we need to compute
798 if (!gimple_aliases_computed_p (cfun
))
801 new_list
.next
= NULL
;
804 old_ops
= gimple_vdef_ops (stmt
);
806 while (old_ops
&& new_i
< VEC_length (tree
, build_vdefs
))
808 tree op
= VEC_index (tree
, build_vdefs
, new_i
);
809 unsigned new_uid
= get_name_decl (op
);
810 unsigned old_uid
= get_name_decl (VDEF_RESULT (old_ops
));
812 /* FIXME, for now each VDEF operator should have at most one
813 operand in their RHS. */
814 gcc_assert (VDEF_NUM (old_ops
) == 1);
816 if (old_uid
== new_uid
)
818 /* If the symbols are the same, reuse the existing operand. */
819 last
->next
= old_ops
;
821 old_ops
= old_ops
->next
;
823 set_virtual_use_link (VDEF_OP_PTR (last
, 0), stmt
);
826 else if (old_uid
< new_uid
)
828 /* If old is less than new, old goes to the free list. */
830 delink_imm_use (VDEF_OP_PTR (old_ops
, 0));
831 next
= old_ops
->next
;
832 add_vop_to_freelist (old_ops
);
837 /* This is a new operand. */
838 last
= add_vdef_op (stmt
, op
, 1, last
);
843 /* If there is anything remaining in BUILD_VDEFS, simply emit it. */
844 for ( ; new_i
< VEC_length (tree
, build_vdefs
); new_i
++)
845 last
= add_vdef_op (stmt
, VEC_index (tree
, build_vdefs
, new_i
), 1, last
);
847 /* If there is anything in the old list, free it. */
850 for (ptr
= old_ops
; ptr
; ptr
= last
)
853 delink_imm_use (VDEF_OP_PTR (ptr
, 0));
854 add_vop_to_freelist (ptr
);
858 /* Now set STMT's operands. */
859 gimple_set_vdef_ops (stmt
, new_list
.next
);
861 #ifdef ENABLE_CHECKING
864 for (ptr
= gimple_vdef_ops (stmt
); ptr
; ptr
= ptr
->next
)
867 gcc_assert (x
== VEC_length (tree
, build_vdefs
));
873 /* Takes elements from BUILD_VUSES and turns them into VUSE operands of
877 finalize_ssa_vuse_ops (gimple stmt
)
879 unsigned new_i
, old_i
;
880 voptype_p old_ops
, last
;
881 VEC(tree
,heap
) *new_ops
;
883 /* Set the symbols referenced by STMT. */
884 gimple_set_loaded_syms (stmt
, build_loads
, &operands_bitmap_obstack
);
886 /* If aliases have not been computed, do not instantiate a virtual
887 operator on STMT. Initially, we only compute the SSA form on
888 GIMPLE registers. The virtual SSA form is only computed after
889 alias analysis, so virtual operators will remain unrenamed and
890 the verifier will complain. However, alias analysis needs to
891 access symbol load/store information, so we need to compute
893 if (!gimple_aliases_computed_p (cfun
))
896 /* STMT should have at most one VUSE operator. */
897 old_ops
= gimple_vuse_ops (stmt
);
898 gcc_assert (old_ops
== NULL
|| old_ops
->next
== NULL
);
903 && old_i
< VUSE_NUM (old_ops
)
904 && new_i
< VEC_length (tree
, build_vuses
))
906 tree new_op
= VEC_index (tree
, build_vuses
, new_i
);
907 tree old_op
= VUSE_OP (old_ops
, old_i
);
908 unsigned new_uid
= get_name_decl (new_op
);
909 unsigned old_uid
= get_name_decl (old_op
);
911 if (old_uid
== new_uid
)
913 /* If the symbols are the same, reuse the existing operand. */
914 VEC_safe_push (tree
, heap
, new_ops
, old_op
);
918 else if (old_uid
< new_uid
)
920 /* If OLD_UID is less than NEW_UID, the old operand has
921 disappeared, skip to the next old operand. */
926 /* This is a new operand. */
927 VEC_safe_push (tree
, heap
, new_ops
, new_op
);
932 /* If there is anything remaining in the build_vuses list, simply emit it. */
933 for ( ; new_i
< VEC_length (tree
, build_vuses
); new_i
++)
934 VEC_safe_push (tree
, heap
, new_ops
, VEC_index (tree
, build_vuses
, new_i
));
936 /* If there is anything in the old list, free it. */
939 for (old_i
= 0; old_i
< VUSE_NUM (old_ops
); old_i
++)
940 delink_imm_use (VUSE_OP_PTR (old_ops
, old_i
));
941 add_vop_to_freelist (old_ops
);
942 gimple_set_vuse_ops (stmt
, NULL
);
945 /* If there are any operands, instantiate a VUSE operator for STMT. */
951 last
= add_vuse_op (stmt
, NULL
, VEC_length (tree
, new_ops
), NULL
);
953 for (i
= 0; VEC_iterate (tree
, new_ops
, i
, op
); i
++)
954 SET_USE (VUSE_OP_PTR (last
, (int) i
), op
);
956 gimple_set_vuse_ops (stmt
, last
);
957 VEC_free (tree
, heap
, new_ops
);
960 #ifdef ENABLE_CHECKING
964 if (gimple_vuse_ops (stmt
))
966 gcc_assert (gimple_vuse_ops (stmt
)->next
== NULL
);
967 x
= VUSE_NUM (gimple_vuse_ops (stmt
));
972 gcc_assert (x
== VEC_length (tree
, build_vuses
));
977 /* Return a new VUSE operand vector for STMT. */
980 finalize_ssa_vuses (gimple stmt
)
982 unsigned num
, num_vdefs
;
  /* Remove superfluous VUSE operands.  If the statement already has a
     VDEF operator for a variable 'a', then a VUSE for 'a' is not
     needed because VDEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is pointed-to by p and q, and the
     statement stores through p while also reading through q.
     The VUSE <a_2> is superfluous because it is implied by the
     VDEF operator created for the store.  */
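  /* Illustrative GIMPLE sketch (SSA version numbers are arbitrary): for
     '*p = *q' with both pointers pointing to 'a', the scanner initially
     collects both

	# VUSE <a_2>
	# a_3 = VDEF <a_2>

     and the VUSE is dropped here, leaving only the VDEF.  */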
996 num
= VEC_length (tree
, build_vuses
);
997 num_vdefs
= VEC_length (tree
, build_vdefs
);
999 if (num
> 0 && num_vdefs
> 0)
1000 for (vuse_index
= 0; vuse_index
< VEC_length (tree
, build_vuses
); )
1003 vuse
= VEC_index (tree
, build_vuses
, vuse_index
);
1004 if (TREE_CODE (vuse
) != SSA_NAME
)
1006 var_ann_t ann
= var_ann (vuse
);
1007 ann
->in_vuse_list
= 0;
1008 if (ann
->in_vdef_list
)
1010 VEC_ordered_remove (tree
, build_vuses
, vuse_index
);
1017 finalize_ssa_vuse_ops (stmt
);
1021 /* Clear the in_list bits and empty the build array for VDEFs and
1025 cleanup_build_arrays (void)
1030 for (i
= 0; VEC_iterate (tree
, build_vdefs
, i
, t
); i
++)
1031 if (TREE_CODE (t
) != SSA_NAME
)
1032 var_ann (t
)->in_vdef_list
= false;
1034 for (i
= 0; VEC_iterate (tree
, build_vuses
, i
, t
); i
++)
1035 if (TREE_CODE (t
) != SSA_NAME
)
1036 var_ann (t
)->in_vuse_list
= false;
1038 VEC_truncate (tree
, build_vdefs
, 0);
1039 VEC_truncate (tree
, build_vuses
, 0);
1040 VEC_truncate (tree
, build_defs
, 0);
1041 VEC_truncate (tree
, build_uses
, 0);
1042 bitmap_clear (build_loads
);
1043 bitmap_clear (build_stores
);
1047 /* Finalize all the build vectors, fill the new ones into INFO. */
1050 finalize_ssa_stmt_operands (gimple stmt
)
1052 finalize_ssa_defs (stmt
);
1053 finalize_ssa_uses (stmt
);
1054 if (gimple_has_mem_ops (stmt
))
1056 finalize_ssa_vdefs (stmt
);
1057 finalize_ssa_vuses (stmt
);
1059 cleanup_build_arrays ();
1063 /* Start the process of building up operands vectors in INFO. */
1066 start_ssa_stmt_operands (void)
1068 gcc_assert (VEC_length (tree
, build_defs
) == 0);
1069 gcc_assert (VEC_length (tree
, build_uses
) == 0);
1070 gcc_assert (VEC_length (tree
, build_vuses
) == 0);
1071 gcc_assert (VEC_length (tree
, build_vdefs
) == 0);
1072 gcc_assert (bitmap_empty_p (build_loads
));
1073 gcc_assert (bitmap_empty_p (build_stores
));
1077 /* Add DEF_P to the list of pointers to operands. */
1080 append_def (tree
*def_p
)
1082 VEC_safe_push (tree
, heap
, build_defs
, (tree
) def_p
);
1086 /* Add USE_P to the list of pointers to operands. */
1089 append_use (tree
*use_p
)
1091 VEC_safe_push (tree
, heap
, build_uses
, (tree
) use_p
);
1095 /* Add VAR to the set of variables that require a VDEF operator. */
1098 append_vdef (tree var
)
1102 if (TREE_CODE (var
) != SSA_NAME
)
1107 /* If VAR belongs to a memory partition, use it instead of VAR. */
1108 mpt
= memory_partition (var
);
1112 /* Don't allow duplicate entries. */
1113 ann
= get_var_ann (var
);
1114 if (ann
->in_vdef_list
)
1117 ann
->in_vdef_list
= true;
1121 sym
= SSA_NAME_VAR (var
);
1123 VEC_safe_push (tree
, heap
, build_vdefs
, var
);
1124 bitmap_set_bit (build_stores
, DECL_UID (sym
));
1128 /* Add VAR to the set of variables that require a VUSE operator. */
1131 append_vuse (tree var
)
1135 if (TREE_CODE (var
) != SSA_NAME
)
1140 /* If VAR belongs to a memory partition, use it instead of VAR. */
1141 mpt
= memory_partition (var
);
1145 /* Don't allow duplicate entries. */
1146 ann
= get_var_ann (var
);
1147 if (ann
->in_vuse_list
)
1149 else if (ann
->in_vdef_list
)
1151 /* We don't want a vuse if we already have a vdef, but we must
1152 still put this in build_loads. */
1153 bitmap_set_bit (build_loads
, DECL_UID (var
));
1157 ann
->in_vuse_list
= true;
1161 sym
= SSA_NAME_VAR (var
);
1163 VEC_safe_push (tree
, heap
, build_vuses
, var
);
1164 bitmap_set_bit (build_loads
, DECL_UID (sym
));
1168 /* REF is a tree that contains the entire pointer dereference
1169 expression, if available, or NULL otherwise. ALIAS is the variable
1170 we are asking if REF can access. OFFSET and SIZE come from the
1171 memory access expression that generated this virtual operand.
1173 XXX: We should handle the NO_ALIAS attributes here. */
1176 access_can_touch_variable (tree ref
, tree alias
, HOST_WIDE_INT offset
,
1179 bool offsetgtz
= offset
> 0;
1180 unsigned HOST_WIDE_INT uoffset
= (unsigned HOST_WIDE_INT
) offset
;
1181 tree base
= ref
? get_base_address (ref
) : NULL
;
1183 /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
1184 using a call-clobbered memory tag. By definition, call-clobbered
1185 memory tags can always touch .GLOBAL_VAR. */
1186 if (alias
== gimple_global_var (cfun
))
1189 /* If ref is a TARGET_MEM_REF, just return true, as we can't really
1190 disambiguate them right now. */
1191 if (ref
&& TREE_CODE (ref
) == TARGET_MEM_REF
)
1194 /* Without strict aliasing, it is impossible for a component access
1195 through a pointer to touch a random variable, unless that
1196 variable *is* a structure or a pointer.
1198 That is, given p->c, and some random global variable b,
1199 there is no legal way that p->c could be an access to b.
1201 Without strict aliasing on, we consider it legal to do something
1204 struct foos { int l; };
1206 static struct foos *getfoo(void);
1209 struct foos *f = getfoo();
1216 static struct foos *getfoo(void)
1217 { return (struct foos *)&foo; }
1219 (taken from 20000623-1.c)
1221 The docs also say/imply that access through union pointers
1222 is legal (but *not* if you take the address of the union member,
1223 i.e. the inverse), such that you can do
1233 U *pretmp = (U*)&rv;
1237 To implement this, we just punt on accesses through union
1240 Another case we have to allow is accessing a variable
1241 through an array access at offset zero. This happens from
1242 code generated by the fortran frontend like
1244 char[1:1] & my_char_ref;
1246 my_char_ref_1 = (char[1:1] &) &my_char;
1247 D.874_2 = (*my_char_ref_1)[1]{lb: 1 sz: 1};
1250 && flag_strict_aliasing
1251 && TREE_CODE (ref
) != INDIRECT_REF
1254 && (TREE_CODE (base
) != INDIRECT_REF
1255 || TREE_CODE (TREE_TYPE (base
)) != UNION_TYPE
)
1256 && (TREE_CODE (base
) != INDIRECT_REF
1257 || TREE_CODE (ref
) != ARRAY_REF
1259 || (DECL_SIZE (alias
)
1260 && TREE_CODE (DECL_SIZE (alias
)) == INTEGER_CST
1262 && (unsigned HOST_WIDE_INT
)size
1263 != TREE_INT_CST_LOW (DECL_SIZE (alias
))))
1264 && !AGGREGATE_TYPE_P (TREE_TYPE (alias
))
1265 && TREE_CODE (TREE_TYPE (alias
)) != COMPLEX_TYPE
1266 && !var_ann (alias
)->is_heapvar
1267 /* When the struct has may_alias attached to it, we need not to
1269 && get_alias_set (base
))
1271 #ifdef ACCESS_DEBUGGING
1272 fprintf (stderr
, "Access to ");
1273 print_generic_expr (stderr
, ref
, 0);
1274 fprintf (stderr
, " may not touch ");
1275 print_generic_expr (stderr
, alias
, 0);
1276 fprintf (stderr
, " in function %s\n", get_name (current_function_decl
));
1281 /* If the offset of the access is greater than the size of one of
1282 the possible aliases, it can't be touching that alias, because it
1283 would be past the end of the structure. */
1285 && flag_strict_aliasing
1286 && TREE_CODE (ref
) != INDIRECT_REF
1288 && !POINTER_TYPE_P (TREE_TYPE (alias
))
1290 && DECL_SIZE (alias
)
1291 && TREE_CODE (DECL_SIZE (alias
)) == INTEGER_CST
1292 && uoffset
> TREE_INT_CST_LOW (DECL_SIZE (alias
)))
1294 #ifdef ACCESS_DEBUGGING
1295 fprintf (stderr
, "Access to ");
1296 print_generic_expr (stderr
, ref
, 0);
1297 fprintf (stderr
, " may not touch ");
1298 print_generic_expr (stderr
, alias
, 0);
1299 fprintf (stderr
, " in function %s\n", get_name (current_function_decl
));
1307 /* Add VAR to the virtual operands for STMT. FLAGS is as in
1308 get_expr_operands. FULL_REF is a tree that contains the entire
1309 pointer dereference expression, if available, or NULL otherwise.
1310 OFFSET and SIZE come from the memory access expression that
1311 generated this virtual operand. IS_CALL_SITE is true if the
1312 affected statement is a call site. */
1315 add_virtual_operand (tree var
, gimple stmt
, int flags
,
1316 tree full_ref
, HOST_WIDE_INT offset
,
1317 HOST_WIDE_INT size
, bool is_call_site
)
1319 bitmap aliases
= NULL
;
1323 sym
= (TREE_CODE (var
) == SSA_NAME
? SSA_NAME_VAR (var
) : var
);
1324 v_ann
= var_ann (sym
);
1326 /* Mark the statement as having memory operands. */
1327 gimple_set_references_memory (stmt
, true);
  /* If the variable cannot be modified and this is a VDEF, change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we
     check that this only happens on non-specific stores.
1334 Note that if this is a specific store, i.e. associated with a
1335 MODIFY_EXPR, then we can't suppress the VDEF, lest we run
1336 into validation problems.
1338 This can happen when programs cast away const, leaving us with a
1339 store to read-only memory. If the statement is actually executed
1340 at runtime, then the program is ill formed. If the statement is
1341 not executed then all is well. At the very least, we cannot ICE. */
1342 if ((flags
& opf_implicit
) && unmodifiable_var_p (var
))
1345 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1346 virtual operands, unless the caller has specifically requested
1347 not to add virtual operands (used when adding operands inside an
1348 ADDR_EXPR expression). */
1349 if (flags
& opf_no_vops
)
1353 aliases
= MTAG_ALIASES (var
);
1355 if (aliases
== NULL
)
1357 if (!gimple_aliases_computed_p (cfun
) && (flags
& opf_def
))
1358 gimple_set_has_volatile_ops (stmt
, true);
1360 /* The variable is not aliased or it is an alias tag. */
1361 if (flags
& opf_def
)
1370 bool none_added
= true;
1372 /* The variable is aliased. Add its aliases to the virtual
1374 gcc_assert (!bitmap_empty_p (aliases
));
1376 EXECUTE_IF_SET_IN_BITMAP (aliases
, 0, i
, bi
)
1378 tree al
= referenced_var (i
);
1380 /* Call-clobbered tags may have non-call-clobbered
1381 symbols in their alias sets. Ignore them if we are
1382 adding VOPs for a call site. */
1383 if (is_call_site
&& !is_call_clobbered (al
))
1386 /* If we do not know the full reference tree or if the access is
1387 unspecified [0, -1], we cannot prune it. Otherwise try doing
1388 so using access_can_touch_variable. */
1390 && !access_can_touch_variable (full_ref
, al
, offset
, size
))
1393 if (flags
& opf_def
)
1400 if (flags
& opf_def
)
1402 /* If the variable is also an alias tag, add a virtual
1403 operand for it, otherwise we will miss representing
1404 references to the members of the variable's alias set.
1405 This fixes the bug in gcc.c-torture/execute/20020503-1.c.
1407 It is also necessary to add bare defs on clobbers for
1408 SMT's, so that bare SMT uses caused by pruning all the
1409 aliases will link up properly with calls. In order to
1410 keep the number of these bare defs we add down to the
1411 minimum necessary, we keep track of which SMT's were used
1412 alone in statement vdefs or VUSEs. */
1414 || (TREE_CODE (var
) == SYMBOL_MEMORY_TAG
1420 /* Even if no aliases have been added, we still need to
1421 establish def-use and use-def chains, lest
1422 transformations think that this is not a memory
1423 reference. For an example of this scenario, see
1424 testsuite/g++.dg/opt/cleanup1.C. */
1432 /* Add *VAR_P to the appropriate operand array for statement STMT.
1433 FLAGS is as in get_expr_operands. If *VAR_P is a GIMPLE register,
1434 it will be added to the statement's real operands, otherwise it is
1435 added to virtual operands. */
1438 add_stmt_operand (tree
*var_p
, gimple stmt
, int flags
)
1443 gcc_assert (SSA_VAR_P (*var_p
));
1446 sym
= (TREE_CODE (var
) == SSA_NAME
? SSA_NAME_VAR (var
) : var
);
1447 v_ann
= var_ann (sym
);
1449 /* Mark statements with volatile operands. */
1450 if (TREE_THIS_VOLATILE (sym
))
1451 gimple_set_has_volatile_ops (stmt
, true);
1453 if (is_gimple_reg (sym
))
1455 /* The variable is a GIMPLE register. Add it to real operands. */
1456 if (flags
& opf_def
)
1462 add_virtual_operand (var
, stmt
, flags
, NULL_TREE
, 0, -1, false);
1465 /* Subroutine of get_indirect_ref_operands. ADDR is the address
1466 that is dereferenced, the meaning of the rest of the arguments
1467 is the same as in get_indirect_ref_operands. */
1470 get_addr_dereference_operands (gimple stmt
, tree
*addr
, int flags
,
1471 tree full_ref
, HOST_WIDE_INT offset
,
1472 HOST_WIDE_INT size
, bool recurse_on_base
)
1476 /* Mark the statement as having memory operands. */
1477 gimple_set_references_memory (stmt
, true);
1479 if (SSA_VAR_P (ptr
))
1481 struct ptr_info_def
*pi
= NULL
;
1483 /* If PTR has flow-sensitive points-to information, use it. */
1484 if (TREE_CODE (ptr
) == SSA_NAME
1485 && (pi
= SSA_NAME_PTR_INFO (ptr
)) != NULL
1486 && pi
->name_mem_tag
)
1488 /* PTR has its own memory tag. Use it. */
1489 add_virtual_operand (pi
->name_mem_tag
, stmt
, flags
,
1490 full_ref
, offset
, size
, false);
1494 /* If PTR is not an SSA_NAME or it doesn't have a name
1495 tag, use its symbol memory tag. */
1498 /* If we are emitting debugging dumps, display a warning if
1499 PTR is an SSA_NAME with no flow-sensitive alias
1500 information. That means that we may need to compute
1501 aliasing again or that a propagation pass forgot to
1502 update the alias information on the pointers. */
1504 && TREE_CODE (ptr
) == SSA_NAME
1506 || (pi
->name_mem_tag
== NULL_TREE
1507 && !pi
->pt_anything
))
1508 && gimple_aliases_computed_p (cfun
))
1511 "NOTE: no flow-sensitive alias info for ");
1512 print_generic_expr (dump_file
, ptr
, dump_flags
);
1513 fprintf (dump_file
, " in ");
1514 print_gimple_stmt (dump_file
, stmt
, 0, 0);
1517 if (TREE_CODE (ptr
) == SSA_NAME
)
1518 ptr
= SSA_NAME_VAR (ptr
);
1519 v_ann
= var_ann (ptr
);
1521 /* If we don't know what this pointer points to then we have
1522 to make sure to not prune virtual operands based on offset
1524 if (v_ann
->symbol_mem_tag
)
1526 add_virtual_operand (v_ann
->symbol_mem_tag
, stmt
, flags
,
1527 full_ref
, 0, -1, false);
1528 /* Make sure we add the SMT itself. */
1529 if (!(flags
& opf_no_vops
))
1531 if (flags
& opf_def
)
1532 append_vdef (v_ann
->symbol_mem_tag
);
1534 append_vuse (v_ann
->symbol_mem_tag
);
1538 /* Aliasing information is missing; mark statement as
1539 volatile so we won't optimize it out too actively. */
1540 else if (!gimple_aliases_computed_p (cfun
)
1541 && (flags
& opf_def
))
1542 gimple_set_has_volatile_ops (stmt
, true);
1545 else if (TREE_CODE (ptr
) == INTEGER_CST
)
1547 /* If a constant is used as a pointer, we can't generate a real
1548 operand for it but we mark the statement volatile to prevent
1549 optimizations from messing things up. */
1550 gimple_set_has_volatile_ops (stmt
, true);
1555 /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
1559 /* If requested, add a USE operand for the base pointer. */
1560 if (recurse_on_base
)
1561 get_expr_operands (stmt
, addr
, opf_use
);
1565 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1566 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
1568 STMT is the statement being processed, EXPR is the INDIRECT_REF
1571 FLAGS is as in get_expr_operands.
1573 FULL_REF contains the full pointer dereference expression, if we
1574 have it, or NULL otherwise.
1576 OFFSET and SIZE are the location of the access inside the
1577 dereferenced pointer, if known.
1579 RECURSE_ON_BASE should be set to true if we want to continue
1580 calling get_expr_operands on the base pointer, and false if
1581 something else will do it for us. */
1584 get_indirect_ref_operands (gimple stmt
, tree expr
, int flags
, tree full_ref
,
1585 HOST_WIDE_INT offset
, HOST_WIDE_INT size
,
1586 bool recurse_on_base
)
1588 tree
*pptr
= &TREE_OPERAND (expr
, 0);
1590 if (TREE_THIS_VOLATILE (expr
))
1591 gimple_set_has_volatile_ops (stmt
, true);
1593 get_addr_dereference_operands (stmt
, pptr
, flags
, full_ref
, offset
, size
,
1598 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1601 get_tmr_operands (gimple stmt
, tree expr
, int flags
)
1605 /* Mark the statement as having memory operands. */
1606 gimple_set_references_memory (stmt
, true);
1608 /* First record the real operands. */
1609 get_expr_operands (stmt
, &TMR_BASE (expr
), opf_use
);
1610 get_expr_operands (stmt
, &TMR_INDEX (expr
), opf_use
);
1612 if (TMR_SYMBOL (expr
))
1613 gimple_add_to_addresses_taken (stmt
, TMR_SYMBOL (expr
));
1615 tag
= TMR_TAG (expr
);
1618 /* Something weird, so ensure that we will be careful. */
1619 gimple_set_has_volatile_ops (stmt
, true);
1624 get_expr_operands (stmt
, &tag
, flags
);
1628 add_virtual_operand (tag
, stmt
, flags
, expr
, 0, -1, false);
1632 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1633 clobbered variables in the function. */
1636 add_call_clobber_ops (gimple stmt
, tree callee ATTRIBUTE_UNUSED
)
1640 bitmap not_read_b
, not_written_b
;
1642 gcc_assert (!(gimple_call_flags (stmt
) & (ECF_PURE
| ECF_CONST
)));
1644 /* If we created .GLOBAL_VAR earlier, just use it. */
1645 if (gimple_global_var (cfun
))
1647 tree var
= gimple_global_var (cfun
);
1648 add_virtual_operand (var
, stmt
, opf_def
, NULL
, 0, -1, true);
1652 /* Get info for local and module level statics. There is a bit
1653 set for each static if the call being processed does not read
1654 or write that variable. */
1655 not_read_b
= callee
? ipa_reference_get_not_read_global (callee
) : NULL
;
1656 not_written_b
= callee
? ipa_reference_get_not_written_global (callee
) : NULL
;
1658 /* Add a VDEF operand for every call clobbered variable. */
1659 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun
), 0, u
, bi
)
1661 tree var
= referenced_var_lookup (u
);
1662 tree real_var
= var
;
1666 not_read
= not_read_b
1667 ? bitmap_bit_p (not_read_b
, DECL_UID (real_var
))
1670 not_written
= not_written_b
1671 ? bitmap_bit_p (not_written_b
, DECL_UID (real_var
))
1673 gcc_assert (!unmodifiable_var_p (var
));
1675 clobber_stats
.clobbered_vars
++;
1677 /* See if this variable is really clobbered by this function. */
1681 clobber_stats
.static_write_clobbers_avoided
++;
1683 add_virtual_operand (var
, stmt
, opf_use
, NULL
, 0, -1, true);
1685 clobber_stats
.static_read_clobbers_avoided
++;
1688 add_virtual_operand (var
, stmt
, opf_def
, NULL
, 0, -1, true);
1693 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1697 add_call_read_ops (gimple stmt
, tree callee ATTRIBUTE_UNUSED
)
1703 /* Const functions do not reference memory. */
1704 if (gimple_call_flags (stmt
) & ECF_CONST
)
1707 not_read_b
= callee
? ipa_reference_get_not_read_global (callee
) : NULL
;
1709 /* For pure functions we compute non-escaped uses separately. */
1710 if (gimple_call_flags (stmt
) & ECF_PURE
)
1711 EXECUTE_IF_SET_IN_BITMAP (gimple_call_used_vars (cfun
), 0, u
, bi
)
1713 tree var
= referenced_var_lookup (u
);
1714 tree real_var
= var
;
1717 if (unmodifiable_var_p (var
))
1720 not_read
= not_read_b
1721 ? bitmap_bit_p (not_read_b
, DECL_UID (real_var
))
1724 clobber_stats
.readonly_clobbers
++;
1726 /* See if this variable is really used by this function. */
1728 add_virtual_operand (var
, stmt
, opf_use
, NULL
, 0, -1, true);
1730 clobber_stats
.static_readonly_clobbers_avoided
++;
1733 /* Add a VUSE for .GLOBAL_VAR if it has been created. See
1734 add_referenced_var for the heuristic used to decide whether to
1735 create .GLOBAL_VAR. */
1736 if (gimple_global_var (cfun
))
1738 tree var
= gimple_global_var (cfun
);
1739 add_virtual_operand (var
, stmt
, opf_use
, NULL
, 0, -1, true);
1743 /* Add a VUSE for each call-clobbered variable. */
1744 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun
), 0, u
, bi
)
1746 tree var
= referenced_var (u
);
1747 tree real_var
= var
;
1750 clobber_stats
.readonly_clobbers
++;
1752 not_read
= not_read_b
? bitmap_bit_p (not_read_b
, DECL_UID (real_var
))
1757 clobber_stats
.static_readonly_clobbers_avoided
++;
1761 add_virtual_operand (var
, stmt
, opf_use
, NULL
, 0, -1, true);
1766 /* If STMT is a call that may clobber globals and other symbols that
1767 escape, add them to the VDEF/VUSE lists for it. */
1770 maybe_add_call_clobbered_vops (gimple stmt
)
1772 int call_flags
= gimple_call_flags (stmt
);
1774 /* Mark the statement as having memory operands. */
1775 gimple_set_references_memory (stmt
, true);
1777 /* If aliases have been computed already, add VDEF or VUSE
1778 operands for all the symbols that have been found to be
1780 if (gimple_aliases_computed_p (cfun
) && !(call_flags
& ECF_NOVOPS
))
1782 /* A 'pure' or a 'const' function never call-clobbers anything.
1783 A 'noreturn' function might, but since we don't return anyway
1784 there is no point in recording that. */
1785 if (!(call_flags
& (ECF_PURE
| ECF_CONST
| ECF_NORETURN
)))
1786 add_call_clobber_ops (stmt
, gimple_call_fndecl (stmt
));
1787 else if (!(call_flags
& ECF_CONST
))
1788 add_call_read_ops (stmt
, gimple_call_fndecl (stmt
));
1793 /* Scan operands in the ASM_EXPR stmt referred to in INFO. */
1796 get_asm_expr_operands (gimple stmt
)
1799 const char **oconstraints
;
1800 const char *constraint
;
1801 bool allows_mem
, allows_reg
, is_inout
;
1803 noutputs
= gimple_asm_noutputs (stmt
);
1804 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
1806 /* Gather all output operands. */
1807 for (i
= 0; i
< gimple_asm_noutputs (stmt
); i
++)
1809 tree link
= gimple_asm_output_op (stmt
, i
);
1810 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1811 oconstraints
[i
] = constraint
;
1812 parse_output_constraint (&constraint
, i
, 0, 0, &allows_mem
,
1813 &allows_reg
, &is_inout
);
1815 /* This should have been split in gimplify_asm_expr. */
1816 gcc_assert (!allows_reg
|| !is_inout
);
1818 /* Memory operands are addressable. Note that STMT needs the
1819 address of this operand. */
1820 if (!allows_reg
&& allows_mem
)
1822 tree t
= get_base_address (TREE_VALUE (link
));
1823 if (t
&& DECL_P (t
))
1824 gimple_add_to_addresses_taken (stmt
, t
);
1827 get_expr_operands (stmt
, &TREE_VALUE (link
), opf_def
);
1830 /* Gather all input operands. */
1831 for (i
= 0; i
< gimple_asm_ninputs (stmt
); i
++)
1833 tree link
= gimple_asm_input_op (stmt
, i
);
1834 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1835 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0, oconstraints
,
1836 &allows_mem
, &allows_reg
);
1838 /* Memory operands are addressable. Note that STMT needs the
1839 address of this operand. */
1840 if (!allows_reg
&& allows_mem
)
1842 tree t
= get_base_address (TREE_VALUE (link
));
1843 if (t
&& DECL_P (t
))
1844 gimple_add_to_addresses_taken (stmt
, t
);
1847 get_expr_operands (stmt
, &TREE_VALUE (link
), 0);
1850 /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
1851 for (i
= 0; i
< gimple_asm_nclobbers (stmt
); i
++)
1853 tree link
= gimple_asm_clobber_op (stmt
, i
);
1854 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link
)), "memory") == 0)
1859 /* Mark the statement as having memory operands. */
1860 gimple_set_references_memory (stmt
, true);
1862 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun
), 0, i
, bi
)
1864 tree var
= referenced_var (i
);
1865 add_stmt_operand (&var
, stmt
, opf_def
| opf_implicit
);
1868 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun
), 0, i
, bi
)
1870 tree var
= referenced_var (i
);
1871 add_stmt_operand (&var
, stmt
, opf_def
| opf_implicit
);
1879 /* Recursively scan the expression pointed to by EXPR_P in statement
1880 STMT. FLAGS is one of the OPF_* constants modifying how to
1881 interpret the operands found. */
1884 get_expr_operands (gimple stmt
, tree
*expr_p
, int flags
)
1886 enum tree_code code
;
1887 enum tree_code_class codeclass
;
1888 tree expr
= *expr_p
;
1893 code
= TREE_CODE (expr
);
1894 codeclass
= TREE_CODE_CLASS (code
);
1899 /* Taking the address of a variable does not represent a
1900 reference to it, but the fact that the statement takes its
1901 address will be of interest to some passes (e.g. alias
1903 gimple_add_to_addresses_taken (stmt
, TREE_OPERAND (expr
, 0));
1905 /* If the address is invariant, there may be no interesting
1906 variable references inside. */
1907 if (is_gimple_min_invariant (expr
))
1910 /* Otherwise, there may be variables referenced inside but there
1911 should be no VUSEs created, since the referenced objects are
1912 not really accessed. The only operands that we should find
1913 here are ARRAY_REF indices which will always be real operands
1914 (GIMPLE does not allow non-registers as array indices). */
1915 flags
|= opf_no_vops
;
1916 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
);
1920 case SYMBOL_MEMORY_TAG
:
1921 case NAME_MEMORY_TAG
:
1922 add_stmt_operand (expr_p
, stmt
, flags
);
1928 add_stmt_operand (expr_p
, stmt
, flags
);
1931 case MISALIGNED_INDIRECT_REF
:
1932 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), flags
);
1935 case ALIGN_INDIRECT_REF
:
1937 get_indirect_ref_operands (stmt
, expr
, flags
, expr
, 0, -1, true);
1940 case TARGET_MEM_REF
:
1941 get_tmr_operands (stmt
, expr
, flags
);
1945 case ARRAY_RANGE_REF
:
1951 HOST_WIDE_INT offset
, size
, maxsize
;
1953 if (TREE_THIS_VOLATILE (expr
))
1954 gimple_set_has_volatile_ops (stmt
, true);
1956 ref
= get_ref_base_and_extent (expr
, &offset
, &size
, &maxsize
);
1957 if (TREE_CODE (ref
) == INDIRECT_REF
)
1959 get_indirect_ref_operands (stmt
, ref
, flags
, expr
, offset
,
1961 flags
|= opf_no_vops
;
1964 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
);
1966 if (code
== COMPONENT_REF
)
1968 if (TREE_THIS_VOLATILE (TREE_OPERAND (expr
, 1)))
1969 gimple_set_has_volatile_ops (stmt
, true);
1970 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_use
);
1972 else if (code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
1974 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), opf_use
);
1975 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_use
);
1976 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 3), opf_use
);
1982 case WITH_SIZE_EXPR
:
1983 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1984 and an rvalue reference to its second argument. */
1985 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), opf_use
);
1986 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
);
1991 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), opf_use
);
1992 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), opf_use
);
1993 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_use
);
1998 /* General aggregate CONSTRUCTORs have been decomposed, but they
1999 are still in use as the COMPLEX_EXPR equivalent for vectors. */
2000 constructor_elt
*ce
;
2001 unsigned HOST_WIDE_INT idx
;
2004 VEC_iterate (constructor_elt
, CONSTRUCTOR_ELTS (expr
), idx
, ce
);
2006 get_expr_operands (stmt
, &ce
->value
, opf_use
);
2012 case TRUTH_NOT_EXPR
:
2013 case VIEW_CONVERT_EXPR
:
2015 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
);
2018 case TRUTH_AND_EXPR
:
2020 case TRUTH_XOR_EXPR
:
2026 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
);
2027 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), flags
);
2032 case REALIGN_LOAD_EXPR
:
2034 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
);
2035 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), flags
);
2036 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), flags
);
2040 case CHANGE_DYNAMIC_TYPE_EXPR
:
2046 case CASE_LABEL_EXPR
:
2049 /* Expressions that make no memory references. */
2053 if (codeclass
== tcc_unary
)
2055 if (codeclass
== tcc_binary
|| codeclass
== tcc_comparison
)
2057 if (codeclass
== tcc_constant
|| codeclass
== tcc_type
)
2061 /* If we get here, something has gone wrong. */
2062 #ifdef ENABLE_CHECKING
2063 fprintf (stderr
, "unhandled expression in get_expr_operands():\n");
2065 fputs ("\n", stderr
);
2071 /* Parse STMT looking for operands. When finished, the various
2072 build_* operand vectors will have potential operands in them. */
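/* For example (a sketch, not taken from a dump): for a statement
   'a = b + c' with 'a', 'b' and 'c' all GIMPLE registers, this collects a
   single DEF for 'a' and USEs for 'b' and 'c'; a statement such as
   '*p = x' additionally collects virtual operands (VDEFs/VUSEs) for the
   symbols 'p' may point to, via add_virtual_operand.  */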
2075 parse_ssa_operands (gimple stmt
)
2077 enum gimple_code code
= gimple_code (stmt
);
2079 if (code
== GIMPLE_ASM
)
2080 get_asm_expr_operands (stmt
);
2083 size_t i
, start
= 0;
2085 if (code
== GIMPLE_ASSIGN
|| code
== GIMPLE_CALL
)
2087 get_expr_operands (stmt
, gimple_op_ptr (stmt
, 0), opf_def
);
2091 for (i
= start
; i
< gimple_num_ops (stmt
); i
++)
2092 get_expr_operands (stmt
, gimple_op_ptr (stmt
, i
), opf_use
);
2094 /* Add call-clobbered operands, if needed. */
2095 if (code
== GIMPLE_CALL
)
2096 maybe_add_call_clobbered_vops (stmt
);
2101 /* Create an operands cache for STMT. */
2104 build_ssa_operands (gimple stmt
)
2106 /* Initially assume that the statement has no volatile operands and
2107 makes no memory references. */
2108 gimple_set_has_volatile_ops (stmt
, false);
2109 gimple_set_references_memory (stmt
, false);
2111 /* Just clear the bitmap so we don't end up reallocating it over and over. */
2112 if (gimple_addresses_taken (stmt
))
2113 bitmap_clear (gimple_addresses_taken (stmt
));
2115 start_ssa_stmt_operands ();
2116 parse_ssa_operands (stmt
);
2117 operand_build_sort_virtual (build_vuses
);
2118 operand_build_sort_virtual (build_vdefs
);
2119 finalize_ssa_stmt_operands (stmt
);
2121 /* For added safety, assume that statements with volatile operands
2122 also reference memory. */
2123 if (gimple_has_volatile_ops (stmt
))
2124 gimple_set_references_memory (stmt
, true);
/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (gimple stmt)
{
  def_optype_p defs = gimple_def_ops (stmt), last_def;
  use_optype_p uses = gimple_use_ops (stmt), last_use;
  voptype_p vuses = gimple_vuse_ops (stmt);
  voptype_p vdefs = gimple_vdef_ops (stmt), vdef, next_vdef;
  unsigned i;

  if (defs)
    {
      for (last_def = defs; last_def->next; last_def = last_def->next)
        continue;
      last_def->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = defs;
      gimple_set_def_ops (stmt, NULL);
    }

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
        delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (vuses)
    {
      for (i = 0; i < VUSE_NUM (vuses); i++)
        delink_imm_use (VUSE_OP_PTR (vuses, i));
      add_vop_to_freelist (vuses);
      gimple_set_vuse_ops (stmt, NULL);
    }

  if (vdefs)
    {
      for (vdef = vdefs; vdef; vdef = next_vdef)
        {
          next_vdef = vdef->next;
          delink_imm_use (VDEF_OP_PTR (vdef, 0));
          add_vop_to_freelist (vdef);
        }
      gimple_set_vdef_ops (stmt, NULL);
    }

  if (gimple_has_ops (stmt))
    gimple_set_addresses_taken (stmt, NULL);

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_stored_syms (stmt, NULL, &operands_bitmap_obstack);
      gimple_set_loaded_syms (stmt, NULL, &operands_bitmap_obstack);
    }
}

/* Get the operands of statement STMT.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  timevar_push (TV_TREE_OPS);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}

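/* Illustrative sketch, not part of the original file: passes normally do
   not call update_stmt_operands directly; they let update_stmt (from
   tree-flow-inline.h) mark the statement modified and trigger the
   re-scan.  The helper name below is hypothetical.  */
#if 0
static void
example_force_operand_rescan (gimple stmt)
{
  /* update_stmt marks STMT modified and then rebuilds its operand
     cache through update_stmt_operands.  */
  update_stmt (stmt);
}
#endif
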
/* Copies virtual operands from SRC to DEST.  */

void
copy_virtual_operands (gimple dest, gimple src)
{
  unsigned int i, n;
  voptype_p src_vuses, dest_vuses;
  voptype_p src_vdefs, dest_vdefs;
  struct voptype_d vuse;
  struct voptype_d vdef;

  if (!gimple_has_mem_ops (src))
    return;

  gimple_set_vdef_ops (dest, NULL);
  gimple_set_vuse_ops (dest, NULL);

  gimple_set_stored_syms (dest, gimple_stored_syms (src),
                          &operands_bitmap_obstack);
  gimple_set_loaded_syms (dest, gimple_loaded_syms (src),
                          &operands_bitmap_obstack);

  /* Copy all the VUSE operators and corresponding operands.  */
  dest_vuses = &vuse;
  for (src_vuses = gimple_vuse_ops (src);
       src_vuses;
       src_vuses = src_vuses->next)
    {
      n = VUSE_NUM (src_vuses);
      dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses);
      for (i = 0; i < n; i++)
        SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));

      if (gimple_vuse_ops (dest) == NULL)
        gimple_set_vuse_ops (dest, vuse.next);
    }

  /* Copy all the VDEF operators and corresponding operands.  */
  dest_vdefs = &vdef;
  for (src_vdefs = gimple_vdef_ops (src);
       src_vdefs;
       src_vdefs = src_vdefs->next)
    {
      n = VUSE_NUM (src_vdefs);
      dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs);
      VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs);
      for (i = 0; i < n; i++)
        SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));

      if (gimple_vdef_ops (dest) == NULL)
        gimple_set_vdef_ops (dest, vdef.next);
    }
}

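/* Illustrative sketch, not part of the original file: a hypothetical
   caller that duplicates a statement and reuses the original's virtual
   operands instead of forcing a full operand re-scan.  */
#if 0
static gimple
example_duplicate_with_vops (gimple stmt)
{
  gimple copy = gimple_copy (stmt);
  copy_virtual_operands (copy, stmt);   /* Destination first, then source.  */
  return copy;
}
#endif
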
/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this
   can be used to eliminate redundant loads.  OLD_STMT is the store stmt,
   and NEW_STMT is the new load which represents a load of the values
   stored.  If DELINK_IMM_USES_P is true, the immediate uses of this stmt
   will be de-linked.  */

void
create_ssa_artificial_load_stmt (gimple new_stmt, gimple old_stmt,
                                 bool delink_imm_uses_p)
{
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned i;

  gimple_set_modified (new_stmt, false);

  /* Process NEW_STMT looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
    if (TREE_CODE (op) != SSA_NAME)
      var_ann (op)->in_vuse_list = false;

  for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
    if (TREE_CODE (op) != SSA_NAME)
      var_ann (op)->in_vdef_list = false;

  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_vdefs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* Clear the loads and stores bitmaps.  */
  bitmap_clear (build_loads);
  bitmap_clear (build_stores);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the VDEF result operand on the new statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
    append_vuse (op);

  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  if (delink_imm_uses_p)
    FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
      delink_imm_use (use_p);
}

/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to check the validity of the swap operation.  */

void
swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp0)
          {
            use0 = ptr;
            break;
          }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp1)
          {
            use1 = ptr;
            break;
          }

      /* If both uses don't have operand entries, there isn't much we can do
         at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
        {
          tree *tmp = USE_OP_PTR (use1)->use;
          USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
          USE_OP_PTR (use0)->use = tmp;
        }
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}

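/* Illustrative sketch, not part of the original file: a hypothetical
   caller that canonicalizes a commutative assignment by swapping its
   two RHS operands while keeping the operand cache consistent.  */
#if 0
static void
example_canonicalize_commutative (gimple stmt)
{
  if (is_gimple_assign (stmt)
      && commutative_tree_code (gimple_assign_rhs_code (stmt)))
    swap_tree_operands (stmt,
                        gimple_assign_rhs1_ptr (stmt),
                        gimple_assign_rhs2_ptr (stmt));
}
#endif
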
/* Add the base address of REF to SET.  */

void
add_to_addressable_set (tree ref, bitmap *set)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*set == NULL)
        *set = BITMAP_ALLOC (&operands_bitmap_obstack);

      bitmap_set_bit (*set, DECL_UID (var));
      TREE_ADDRESSABLE (var) = 1;
    }
}

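/* Example: for a reference like 'x.a[i].b', get_base_address returns the
   whole object 'x'; it is 'x' that lands in the set and is marked
   TREE_ADDRESSABLE, not the innermost field.  */
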
/* Add the base address of REF to the set of addresses taken by STMT.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  If the base address of REF is a decl that has sub-variables,
   also add all of its sub-variables.  */

void
gimple_add_to_addresses_taken (gimple stmt, tree ref)
{
  gcc_assert (gimple_has_ops (stmt));
  add_to_addressable_set (ref, gimple_addresses_taken_ptr (stmt));
}

/* Scan the immediate_use list for VAR, making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error;     /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
         problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  count = 0;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;

      prev = ptr;
      ptr = ptr->prev;

      if (count++ > 50000000)
        goto error;
    }

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}

/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
        print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
      else
        print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}

/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump the def-use edges of VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

/* Create a new change buffer for the statement pointed to by STMT_P and
   push the buffer into SCB_STACK.  Each change buffer records state
   information needed to determine what changed in the statement.  Mainly,
   this keeps track of symbols that may need to be put into SSA form, SSA
   name replacements and other information needed to keep the SSA form up
   to date.  */

void
push_stmt_changes (gimple *stmt_p)
{
  gimple stmt;
  scb_t buf;

  stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    return;

  buf = XNEW (struct scb_d);
  memset (buf, 0, sizeof *buf);

  buf->stmt_p = stmt_p;

  if (gimple_references_memory_p (stmt))
    {
      tree op;
      ssa_op_iter i;

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
        {
          tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
          if (buf->loads == NULL)
            buf->loads = BITMAP_ALLOC (NULL);
          bitmap_set_bit (buf->loads, DECL_UID (sym));
        }

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
        {
          tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
          if (buf->stores == NULL)
            buf->stores = BITMAP_ALLOC (NULL);
          bitmap_set_bit (buf->stores, DECL_UID (sym));
        }
    }

  VEC_safe_push (scb_t, heap, scb_stack, buf);
}

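/* Illustrative sketch, not part of the original file: the change buffers
   are meant to bracket an in-place statement rewrite.  The function name
   and the 'try_to_simplify' predicate are hypothetical placeholders.  */
#if 0
static void
example_rewrite_stmt (gimple *stmt_p)
{
  push_stmt_changes (stmt_p);

  if (try_to_simplify (*stmt_p))
    /* Re-scan operands and rename any symbols whose load/store sets
       changed.  */
    pop_stmt_changes (stmt_p);
  else
    /* Nothing was modified; avoid the expensive operand re-scan.  */
    discard_stmt_changes (stmt_p);
}
#endif
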
/* Given two sets S1 and S2, mark the symbols that differ in S1 and S2
   for renaming.  The set to mark for renaming is (S1 & ~S2) | (S2 & ~S1).  */

static void
mark_difference_for_renaming (bitmap s1, bitmap s2)
{
  if (s1 == NULL && s2 == NULL)
    return;

  if (s1 && s2 == NULL)
    mark_set_for_renaming (s1);
  else if (s1 == NULL && s2)
    mark_set_for_renaming (s2);
  else if (!bitmap_equal_p (s1, s2))
    {
      bitmap t1 = BITMAP_ALLOC (NULL);
      bitmap_xor (t1, s1, s2);
      mark_set_for_renaming (t1);
      BITMAP_FREE (t1);
    }
}

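/* Example: if S1 = {a, b} and S2 = {b, c}, the symbols marked for
   renaming are {a, c}; 'b' appears in both sets and is left alone.  */
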
/* Pop the top SCB from SCB_STACK and act on the differences between
   what was recorded by push_stmt_changes and the current state of
   the statement.  */

void
pop_stmt_changes (gimple *stmt_p)
{
  tree op;
  gimple stmt;
  ssa_op_iter iter;
  bitmap loads, stores;
  scb_t buf;

  stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    return;

  buf = VEC_pop (scb_t, scb_stack);
  gcc_assert (stmt_p == buf->stmt_p);

  /* Force an operand re-scan on the statement and mark any newly
     exposed variables.  */
  update_stmt (stmt);

  /* Determine whether any memory symbols need to be renamed.  If the
     sets of loads and stores are different after the statement is
     modified, then the affected symbols need to be renamed.

     Note that it may be possible for the statement to not reference
     memory anymore, but we still need to act on the differences in
     the sets of symbols.  */
  loads = stores = NULL;
  if (gimple_references_memory_p (stmt))
    {
      ssa_op_iter i;

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
        {
          tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
          if (loads == NULL)
            loads = BITMAP_ALLOC (NULL);
          bitmap_set_bit (loads, DECL_UID (sym));
        }

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
        {
          tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
          if (stores == NULL)
            stores = BITMAP_ALLOC (NULL);
          bitmap_set_bit (stores, DECL_UID (sym));
        }
    }

  /* If LOADS is different from BUF->LOADS, the affected
     symbols need to be marked for renaming.  */
  mark_difference_for_renaming (loads, buf->loads);

  /* Similarly for STORES and BUF->STORES.  */
  mark_difference_for_renaming (stores, buf->stores);

  /* Mark all the naked GIMPLE register operands for renaming.  */
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
    if (DECL_P (op))
      mark_sym_for_renaming (op);

  /* FIXME, need to add more finalizers here.  Clean up EH info,
     recompute invariants for address expressions, add
     SSA replacement mappings, etc.  For instance, given
     testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of
     the form

        # SMT.4_20 = VDEF <SMT.4_16>

     So, the VDEF will disappear, but instead of marking SMT.4 for
     renaming it would be far more efficient to establish a
     replacement mapping that would replace every reference of
     SMT.4_20 with SMT.4_16.  */

  /* Free memory used by the buffer.  */
  BITMAP_FREE (buf->loads);
  BITMAP_FREE (buf->stores);
  BITMAP_FREE (loads);
  BITMAP_FREE (stores);
  buf->stmt_p = NULL;
  free (buf);
}

/* Discard the topmost change buffer from SCB_STACK.  This is useful
   when the caller realizes that it did not actually modify the
   statement.  It avoids the expensive operand re-scan.  */

void
discard_stmt_changes (gimple *stmt_p)
{
  gimple stmt;
  scb_t buf;

  /* It makes no sense to keep track of PHI nodes.  */
  stmt = *stmt_p;
  if (gimple_code (stmt) == GIMPLE_PHI)
    return;

  buf = VEC_pop (scb_t, scb_stack);
  gcc_assert (stmt_p == buf->stmt_p);

  /* Free memory used by the buffer.  */
  BITMAP_FREE (buf->loads);
  BITMAP_FREE (buf->stores);
  buf->stmt_p = NULL;
  free (buf);
}