/* SSA operands management for trees.
   Copyright (C) 2003-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
27 #include "gimple-pretty-print.h"
28 #include "tree-flow.h"
29 #include "tree-inline.h"
34 #include "langhooks.h"
35 #include "diagnostic-core.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands.  Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
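
/* Illustrative example (editor's sketch, not part of the original
   sources): once the operand cache of a statement is up to date, a pass
   typically walks the real uses with the operand iterator macros and
   reaches the virtual operands through gimple_vuse ()/gimple_vdef (),
   e.g.

     use_operand_p use_p;
     ssa_op_iter iter;

     FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
       process_use (USE_FROM_PTR (use_p));

   where process_use () is a placeholder for whatever the pass does with
   each use operand.  */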
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use 0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def (1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit (1 << 2)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)
/* Array for building all the use operands.  */
static vec<tree> build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our datastructures that needs to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;
/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (struct function *fun)
{
  if (fun == NULL)
    return false;

  return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
}
/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operands FUD chain.  */

static void
create_vop_var (struct function *fn)
{
  tree global_var;

  gcc_assert (fn->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
			   get_identifier (".MEM"),
			   void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;
  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;

  fn->gimple_df->vop = global_var;
}
/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operands space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT 0
#define OP_SIZE_1 (1024 - sizeof (void *))
#define OP_SIZE_2 (1024 * 4 - sizeof (void *))
#define OP_SIZE_3 (1024 * 16 - sizeof (void *))
/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  if (!n_initialized++)
    {
      build_uses.create (10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  gimple_ssa_operands (fn)->operand_memory_index
    = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  if (!fn->gimple_df->vop)
    create_vop_var (fn);
}
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      build_uses.release ();
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
	= gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  cfun->gimple_df->vop = NULL_TREE;
}
/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d));

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}

      ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *)
		+ gimple_ssa_operands (cfun)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
	  ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}
/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
	= gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
	  ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}
/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use ();
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs vec of tree *.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      cfun->gimple_df->rename_vops = 1;
      cfun->gimple_df->ssa_renaming_needed = 1;
    }
}
/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses vec of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      build_uses.safe_insert (0, (tree) gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (cfun));
      cfun->gimple_df->rename_vops = 1;
      cfun->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = (tree *) build_uses[new_i];
      last = add_use_op (stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}
/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  build_uses.truncate (0);
}


/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}
/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (build_uses.length () == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}
/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  build_uses.safe_push ((tree) use_p);
}
/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  gcc_assert ((build_vdef == NULL_TREE
	       || build_vdef == var)
	      && (build_vuse == NULL_TREE
		  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  gcc_assert (build_vuse == NULL_TREE
	      || build_vuse == var);

  build_vuse = var;
}
/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (cfun));
  else
    append_vuse (gimple_vop (cfun));
}
/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var = *var_p;

  gcc_assert (SSA_VAR_P (*var_p));

  if (is_gimple_reg (var))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	;
      else
	append_use (var_p);
      if (DECL_P (*var_p))
	cfun->gimple_df->ssa_renaming_needed = 1;
    }
  else
    {
      /* Mark statements with volatile operands.  */
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (var))
	gimple_set_has_volatile_ops (stmt, true);

      /* The variable is a memory access.  Add virtual operands.  */
      add_virtual_operand (stmt, flags);
    }
}
/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
	TREE_ADDRESSABLE (var) = 1;
      else if (TREE_CODE (var) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
	TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}
/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
      that got us here.

   FLAGS is as in get_expr_operands.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr,
		     opf_non_addressable | opf_use
		     | (flags & (opf_no_vops|opf_not_non_addressable)));
}
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  add_virtual_operand (stmt, flags);
}
/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_virtual_operand (stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (stmt, opf_use);
    }
}
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link),
			 opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (stmt, opf_def);
}
/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
	 variable references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable);
      return;
    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (!(flags & opf_no_vops)
		&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;
    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
      return;
    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile constructor is actually TREE_CLOBBER_P, transfer
	   the volatility to the statement, don't use TREE_CLOBBER_P for
	   mirroring the other uses of THIS_VOLATILE in this file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, uflags);

	return;
      }
    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }
    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_expr_operands (stmt);
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      append_vuse (gimple_vop (cfun));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (stmt);
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}
/* Create an operands cache for STMT.  */

static void
build_ssa_operands (gimple stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  finalize_ssa_stmt_operands (stmt);
}
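
/* Illustrative flow (editor's sketch, assuming the usual update
   machinery): a pass that rewrites a statement in place normally does
   not call build_ssa_operands directly but relies on

     gimple_assign_set_rhs1 (stmt, newval);
     update_stmt (stmt);

   where update_stmt () marks the statement modified and reaches this
   file through update_stmt_operands () below.  */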
/* Verifies SSA statement operands.  */

bool
verify_ssa_operands (gimple stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree use, def;
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);

  /* Now verify the built operands are the same as present in STMT.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      FOR_EACH_VEC_ELT (build_uses, i, use)
	{
	  if (use_p->use == (tree *) use)
	    {
	      build_uses[i] = NULL_TREE;
	      break;
	    }
	}
      if (i == build_uses.length ())
	{
	  error ("excess use operand for stmt");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }
  FOR_EACH_VEC_ELT (build_uses, i, use)
    if (use != NULL_TREE)
      {
	error ("use operand missing for stmt");
	debug_generic_expr (*(tree *) use);
	return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}
/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (gimple stmt)
{
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
	delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}
/* Get the operands of statement STMT.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active (cfun))
    return;

  timevar_push (TV_TREE_OPS);

  /* If the stmt is a noreturn call queue it to be processed by
     split_bbs_on_noreturn_calls during cfg cleanup.  */
  if (is_gimple_call (stmt)
      && gimple_call_noreturn_p (stmt))
    vec_safe_push (MODIFIED_NORETURN_CALLS (cfun), stmt);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is done
   to test the validity of the swap operation.  */

void
swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists by adjusting their use pointer to point to the new
     operand position.  */
  if (ssa_operands_active (cfun) && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* And adjust their location to point to the new position of the
	 operand.  */
      if (use0)
	USE_OP_PTR (use0)->use = exp1;
      if (use1)
	USE_OP_PTR (use1)->use = exp0;
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
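
/* Illustrative use (editor's sketch): to canonicalize a commutative
   assignment a pass might call

     swap_tree_operands (stmt,
			 gimple_assign_rhs1_ptr (stmt),
			 gimple_assign_rhs2_ptr (stmt));

   which swaps the trees and keeps the immediate-use lists consistent
   with the new operand positions.  */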
/* Scan the immediate_use list for VAR making sure its linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *) ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *) ptr,
	   (void *) ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
	fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
      else
	print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}
/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
	continue;
      dump_immediate_uses_for (file, var);
    }
}
/* Dump def-use edges on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}
/* Return true if OP, an SSA name or a DECL is a virtual operand.  */

bool
virtual_operand_p (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      op = SSA_NAME_VAR (op);
      if (!op)
	return false;
    }

  if (TREE_CODE (op) == VAR_DECL)
    return VAR_DECL_IS_VIRTUAL_OPERAND (op);

  return false;
}
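
/* Illustrative use (editor's sketch): passes commonly skip virtual PHI
   nodes with a test like

     if (virtual_operand_p (gimple_phi_result (phi)))
       continue;

   when iterating over the PHI nodes of a basic block.  */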
/* Unlink STMTs virtual definition from the IL by propagating its use.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, vuse);
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}
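
/* Illustrative use (editor's sketch): a pass that deletes a dead store
   typically does

     unlink_stmt_vdef (stmt);
     gsi_remove (&gsi, true);
     release_defs (stmt);

   so that uses of the store's VDEF are rewritten to its VUSE before the
   statement disappears.  */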