/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "langhooks.h"
#include "diagnostic-core.h"

/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   For example, if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
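
/* As an illustration of how a consumer reads this cache (a sketch,
   using the operand iterator macros from tree-ssa-operands.h), a pass
   that wants to visit every real USE operand of STMT would do:

       use_operand_p use_p;
       ssa_op_iter iter;

       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
         {
           tree op = USE_FROM_PTR (use_p);
           ...
         }

   Because real operands are pointers into the statement itself,
   SET_USE (use_p, ...) rewrites the statement in place; the virtual
   operands are instead read with gimple_vuse/gimple_vdef.  */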

/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use		0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def 	(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit	(1 << 2)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)
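
/* A worked example of how these flags drive the scan (illustrative
   only; the exact virtual operands depend on alias information).
   For the GIMPLE assignment

       *p = a + b;

   the RHS is scanned with opf_use, yielding real USEs of 'a' and 'b',
   and the LHS is scanned with opf_def, yielding a real USE of 'p' for
   the address and a VDEF for the store through it.  For

       q = &a.b[i];

   the ADDR_EXPR is scanned with opf_no_vops set, so only a real USE
   of 'i' (plus the DEF of 'q') is recorded, exactly as the opf_no_vops
   comment above describes.  */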

/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (const_tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}

/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  /* This function may be invoked from contexts where CFUN is NULL
     (IPA passes), return false for now.  FIXME: operands may be
     active in each individual function, maybe this function should
     take CFUN as a parameter.  */
  if (cfun == NULL)
    return false;

  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}

/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operands FUD chain.  */

static void
create_vop_var (struct function *fn)
{
  tree global_var;

  gcc_assert (fn->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
			   get_identifier (".MEM"),
			   void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;
  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;

  add_referenced_var_1 (global_var, fn);
  fn->gimple_df->vop = global_var;
}

/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operands space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_1	(1024 - sizeof (void *))
#define OP_SIZE_2	(1024 * 4 - sizeof (void *))
#define OP_SIZE_3	(1024 * 16 - sizeof (void *))
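
/* To check the arithmetic above (assuming the operand structures are
   pure pointer aggregates, as on a typical LP64 host): a struct
   def_optype_d is two pointers, 16 bytes, so the first chunk holds
   (1024 - 8) / 16 = 63 defs; a struct use_optype_d is five pointers,
   40 bytes, so the same 1016 usable bytes hold 25 uses.  */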

/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  gimple_ssa_operands (fn)->operand_memory_index
    = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}

/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
	= gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  cfun->gimple_df->vop = NULL_TREE;
}

/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d)
	      || size == sizeof (struct def_optype_d));

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}

      ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *)
			+ gimple_ssa_operands (cfun)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
	  ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}
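
/* Stripped of the gimple_ssa_operands plumbing, the scheme above is a
   plain bump allocator over geometrically growing chunks.  A minimal
   standalone sketch of the same technique (hypothetical names, using
   libiberty's xmalloc):

       struct chunk { struct chunk *next; char mem[1]; };
       static struct chunk *chunks;
       static unsigned chunk_index, chunk_size;

       static void *
       bump_alloc (unsigned size)
       {
	 if (chunks == NULL || chunk_index + size >= chunk_size)
	   {
	     struct chunk *c;
	     chunk_size = chunk_size ? chunk_size * 4 : 1024;
	     c = (struct chunk *) xmalloc (sizeof (struct chunk *)
					   + chunk_size);
	     c->next = chunks;
	     chunks = c;
	     chunk_index = 0;
	   }
	 chunk_index += size;
	 return &chunks->mem[chunk_index - size];
       }

   Individual operands are never handed back to the chunks; they are
   recycled through the free_defs/free_uses lists below, and the chunks
   themselves are released wholesale in fini_ssa_operands.  */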

/* Allocate a DEF operand.  */

static inline struct def_optype_d *
alloc_def (void)
{
  struct def_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_defs)
    {
      ret = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs
	= gimple_ssa_operands (cfun)->free_defs->next;
    }
  else
    ret = (struct def_optype_d *)
	  ssa_operand_alloc (sizeof (struct def_optype_d));
  return ret;
}

/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
	= gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
	  ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}

/* Adds OP to the list of defs after LAST.  */

static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
  def_optype_p new_def;

  new_def = alloc_def ();
  DEF_OP_PTR (new_def) = op;
  last->next = new_def;
  new_def->next = NULL;
  return new_def;
}

/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use ();
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}
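
/* link_imm_use_stmt above also enters the new use on the circular
   doubly-linked immediate-use list rooted at the SSA name (it is a
   helper in tree-flow-inline.h), which is what makes
   FOR_EACH_IMM_USE_FAST and friends work.  The heart of such an
   insertion after the list root is the usual four-pointer update
   (sketch):

       new_node->prev = root;
       new_node->next = root->next;
       root->next->prev = new_node;
       root->next = new_node;

   delink_imm_use undoes it by splicing the node back out.  */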

/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);

  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
      VEC_safe_insert (tree, heap, build_defs, 0, (tree)gimple_vdef_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_def_ops (stmt);

  new_i = 0;

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    mark_sym_for_renaming (gimple_vdef (stmt));

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  gimple_set_def_ops (stmt, new_list.next);
}
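
/* Tying this to the overview at the top of the file: suppose STMT
   previously had the virtual definition .MEM_5 = VDEF <.MEM_4> and
   still stores to memory after the update.  build_vdef is then the
   bare symbol .MEM; since SSA_NAME_VAR (.MEM_5) == .MEM, the test
   above leaves the existing name in place and the statement keeps
   .MEM_5 instead of being reset to the unversioned symbol.
   (Illustrative; in this representation all virtual operands are
   versions of the single .MEM variable.)  */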

/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      VEC_safe_insert (tree, heap, build_uses, 0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (cfun));
      mark_sym_for_renaming (gimple_vop (cfun));
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
		       (tree *) VEC_index (tree, build_uses, new_i),
		       last);

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}

/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  VEC_truncate (tree, build_defs, 0);
  VEC_truncate (tree, build_uses, 0);
}

/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}

/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}

/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}

/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  if (!optimize)
    return;

  gcc_assert ((build_vdef == NULL_TREE
	       || build_vdef == var)
	      && (build_vuse == NULL_TREE
		  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  if (!optimize)
    return;

  gcc_assert (build_vuse == NULL_TREE
	      || build_vuse == var);

  build_vuse = var;
}

/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (cfun));
  else
    append_vuse (gimple_vop (cfun));
}

/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var, sym;

  gcc_assert (SSA_VAR_P (*var_p));

  var = *var_p;
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);

  /* Mark statements with volatile operands.  */
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (sym))
    gimple_set_has_volatile_ops (stmt, true);

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    add_virtual_operand (stmt, flags);
}

/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
	TREE_ADDRESSABLE (var) = 1;
      else if (TREE_CODE (var) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
	TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}
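
/* For example, for 'p_1 = &s.f' this runs on 's.f';
   get_base_address returns the whole decl 's', and marking 's'
   addressable (rather than just the field) is exactly the point of
   the PR 21407 note above: through 'p_1', any part of 's' may be
   reached with pointer arithmetic.  */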

/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
      that got us here.

   FLAGS is as in get_expr_operands.

   RECURSE_ON_BASE should be set to true if we want to continue
      calling get_expr_operands on the base pointer, and false if
      something else will do it for us.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags,
			   bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr,
		       opf_non_addressable | opf_use
		       | (flags & (opf_no_vops|opf_not_non_addressable)));
}

/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  add_virtual_operand (stmt, flags);
}

/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_virtual_operand (stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (stmt, opf_use);
    }
}
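
/* Concretely (and because append_vdef also records a VUSE, see
   above): a call to an ordinary function gets both a VDEF and a VUSE
   of .MEM; a call to a 'pure' function gets only a VUSE; a call to a
   'const' function gets no virtual operands at all.  */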

/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link),
			 opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (stmt, opf_def);
}

/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
	 variable references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable);
      return;

    case SSA_NAME:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_indirect_ref_operands (stmt, expr, flags, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (!(flags & opf_no_vops)
		&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile constructor is actually TREE_CLOBBER_P, transfer
	   the volatility to the statement, don't use TREE_CLOBBER_P for
	   mirroring the other uses of THIS_VOLATILE in this file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case FMA_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}

/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_expr_operands (stmt);
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      append_vuse (gimple_vop (cfun));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (stmt);
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}

/* Create an operands cache for STMT.  */

static void
build_ssa_operands (gimple stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  finalize_ssa_stmt_operands (stmt);
}

/* Verifies SSA statement operands.  */

DEBUG_FUNCTION bool
verify_ssa_operands (gimple stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree use, def;
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);

  /* Now verify the built operands are the same as present in STMT.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      FOR_EACH_VEC_ELT (tree, build_uses, i, use)
	{
	  if (use_p->use == (tree *)use)
	    {
	      VEC_replace (tree, build_uses, i, NULL_TREE);
	      break;
	    }
	}
      if (i == VEC_length (tree, build_uses))
	{
	  error ("excess use operand for stmt");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }
  FOR_EACH_VEC_ELT (tree, build_uses, i, use)
    if (use != NULL_TREE)
      {
	error ("use operand missing for stmt");
	debug_generic_expr (*(tree *)use);
	return true;
      }

  FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF)
    {
      FOR_EACH_VEC_ELT (tree, build_defs, i, def)
	{
	  if (def_p == (tree *)def)
	    {
	      VEC_replace (tree, build_defs, i, NULL_TREE);
	      break;
	    }
	}
      if (i == VEC_length (tree, build_defs))
	{
	  error ("excess def operand for stmt");
	  debug_generic_expr (DEF_FROM_PTR (def_p));
	  return true;
	}
    }
  FOR_EACH_VEC_ELT (tree, build_defs, i, def)
    if (def != NULL_TREE)
      {
	error ("def operand missing for stmt");
	debug_generic_expr (*(tree *)def);
	return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}

/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (gimple stmt)
{
  def_optype_p defs = gimple_def_ops (stmt), last_def;
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (defs)
    {
      for (last_def = defs; last_def->next; last_def = last_def->next)
	continue;
      last_def->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = defs;
      gimple_set_def_ops (stmt, NULL);
    }

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
	delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}

/* Get the operands of statement STMT.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  timevar_push (TV_TREE_OPS);

  /* If the stmt is a noreturn call queue it to be processed by
     split_bbs_on_noreturn_calls during cfg cleanup.  */
  if (is_gimple_call (stmt)
      && gimple_call_noreturn_p (stmt))
    VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), stmt);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}

/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is done
   to test the validity of the swap operation.  */

void
swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists by adjusting their use pointer to point to the new
     operand position.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* And adjust their location to point to the new position of the
	 operand.  */
      if (use0)
	USE_OP_PTR (use0)->use = exp1;
      if (use1)
	USE_OP_PTR (use1)->use = exp0;
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
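
/* A typical caller is operand canonicalization, e.g. swapping the two
   operands of a commutative operation (sketch, assuming STMT is a
   binary GIMPLE_ASSIGN):

       swap_tree_operands (stmt,
			   gimple_assign_rhs1_ptr (stmt),
			   gimple_assign_rhs2_ptr (stmt));

   Going through this routine instead of swapping the trees by hand
   keeps the operand cache and the immediate-use lists consistent
   without triggering a full operand rescan.  */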

/* Scan the immediate_use list for VAR making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count = 0;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}

/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
	fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
      else
	print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}

/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
	continue;
      dump_immediate_uses_for (file, var);
    }
}

/* Dump def-use edges on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

/* Unlink STMT's virtual definition from the IL by propagating its use.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, vuse);
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}
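
/* The canonical use of this function is deleting a store that has
   become dead.  A pass removing STMT at iterator GSI would typically
   do (sketch):

       unlink_stmt_vdef (stmt);
       gsi_remove (&gsi, true);
       release_defs (stmt);

   so that consumers of the removed .MEM version are rewired to the
   version the statement used to consume.  */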