1 /* SSA operands management for trees.
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
26 #include "print-tree.h"
32 #include "hard-reg-set.h"
35 #include "gimple-pretty-print.h"
38 #include "basic-block.h"
39 #include "tree-ssa-alias.h"
40 #include "internal-fn.h"
41 #include "gimple-expr.h"
44 #include "gimple-ssa.h"
45 #include "tree-phinodes.h"
46 #include "ssa-iterators.h"
47 #include "stringpool.h"
48 #include "tree-ssanames.h"
49 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "diagnostic-core.h"
57 /* This file contains the code required to manage the operands cache of the
58 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
59 annotation. This cache contains operands that will be of interest to
60 optimizers and other passes wishing to manipulate the IL.
62 The operand type are broken up into REAL and VIRTUAL operands. The real
63 operands are represented as pointers into the stmt's operand tree. Thus
64 any manipulation of the real operands will be reflected in the actual tree.
65 Virtual operands are represented solely in the cache, although the base
66 variable for the SSA_NAME may, or may not occur in the stmt's tree.
67 Manipulation of the virtual operands will not be reflected in the stmt tree.
69 The routines in this file are concerned with creating this operand cache
72 The operand tree is the parsed by the various get_* routines which look
73 through the stmt tree for the occurrence of operands which may be of
74 interest, and calls are made to the append_* routines whenever one is
75 found. There are 4 of these routines, each representing one of the
76 4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs.
78 The append_* routines check for duplication, and simply keep a list of
79 unique objects for each operand type in the build_* extendable vectors.
81 Once the stmt tree is completely parsed, the finalize_ssa_operands()
82 routine is called, which proceeds to perform the finalization routine
83 on each of the 4 operand vectors which have been built up.
85 If the stmt had a previous operand cache, the finalization routines
86 attempt to match up the new operands with the old ones. If it's a perfect
87 match, the old vector is simply reused. If it isn't a perfect match, then
88 a new vector is created and the new operands are placed there. For
89 virtual operands, if the previous cache had SSA_NAME version of a
90 variable, and that same variable occurs in the same operands cache, then
91 the new cache vector will also get the same SSA_NAME.
93 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
94 operand vector for VUSE, then the new vector will also be modified
95 such that it contains 'a_5' rather than 'a'. */
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use		0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def 	(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 1)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

/* Operand is having its address taken.  */
#define opf_address_taken (1 << 5)
124 /* Array for building all the use operands. */
125 static vec
<tree
> build_uses
;
127 /* The built VDEF operand. */
128 static tree build_vdef
;
130 /* The built VUSE operand. */
131 static tree build_vuse
;
133 /* Bitmap obstack for our datastructures that needs to survive across
134 compilations of multiple functions. */
135 static bitmap_obstack operands_bitmap_obstack
;
137 static void get_expr_operands (struct function
*, gimple
, tree
*, int);
139 /* Number of functions with initialized ssa_operands. */
140 static int n_initialized
= 0;
142 /* Accessor to tree-ssa-operands.c caches. */
143 static inline struct ssa_operands
*
144 gimple_ssa_operands (const struct function
*fun
)
146 return &fun
->gimple_df
->ssa_operands
;
150 /* Return true if the SSA operands cache is active. */
153 ssa_operands_active (struct function
*fun
)
158 return fun
->gimple_df
&& gimple_ssa_operands (fun
)->ops_active
;
162 /* Create the VOP variable, an artificial global variable to act as a
163 representative of all of the virtual operands FUD chain. */
166 create_vop_var (struct function
*fn
)
170 gcc_assert (fn
->gimple_df
->vop
== NULL_TREE
);
172 global_var
= build_decl (BUILTINS_LOCATION
, VAR_DECL
,
173 get_identifier (".MEM"),
175 DECL_ARTIFICIAL (global_var
) = 1;
176 DECL_IGNORED_P (global_var
) = 1;
177 TREE_READONLY (global_var
) = 0;
178 DECL_EXTERNAL (global_var
) = 1;
179 TREE_STATIC (global_var
) = 1;
180 TREE_USED (global_var
) = 1;
181 DECL_CONTEXT (global_var
) = NULL_TREE
;
182 TREE_THIS_VOLATILE (global_var
) = 0;
183 TREE_ADDRESSABLE (global_var
) = 0;
184 VAR_DECL_IS_VIRTUAL_OPERAND (global_var
) = 1;
186 fn
->gimple_df
->vop
= global_var
;
/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operands space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT 0
#define OP_SIZE_1 (1024 - sizeof (void *))
#define OP_SIZE_2 (1024 * 4 - sizeof (void *))
#define OP_SIZE_3 (1024 * 16 - sizeof (void *))
201 /* Initialize the operand cache routines. */
204 init_ssa_operands (struct function
*fn
)
206 if (!n_initialized
++)
208 build_uses
.create (10);
209 build_vuse
= NULL_TREE
;
210 build_vdef
= NULL_TREE
;
211 bitmap_obstack_initialize (&operands_bitmap_obstack
);
214 gcc_assert (gimple_ssa_operands (fn
)->operand_memory
== NULL
);
215 gimple_ssa_operands (fn
)->operand_memory_index
216 = gimple_ssa_operands (fn
)->ssa_operand_mem_size
;
217 gimple_ssa_operands (fn
)->ops_active
= true;
218 gimple_ssa_operands (fn
)->ssa_operand_mem_size
= OP_SIZE_INIT
;
223 /* Dispose of anything required by the operand routines. */
226 fini_ssa_operands (struct function
*fn
)
228 struct ssa_operand_memory_d
*ptr
;
230 if (!--n_initialized
)
232 build_uses
.release ();
233 build_vdef
= NULL_TREE
;
234 build_vuse
= NULL_TREE
;
237 gimple_ssa_operands (fn
)->free_uses
= NULL
;
239 while ((ptr
= gimple_ssa_operands (fn
)->operand_memory
) != NULL
)
241 gimple_ssa_operands (fn
)->operand_memory
242 = gimple_ssa_operands (fn
)->operand_memory
->next
;
246 gimple_ssa_operands (fn
)->ops_active
= false;
249 bitmap_obstack_release (&operands_bitmap_obstack
);
251 fn
->gimple_df
->vop
= NULL_TREE
;
255 /* Return memory for an operand of size SIZE. */
258 ssa_operand_alloc (struct function
*fn
, unsigned size
)
262 gcc_assert (size
== sizeof (struct use_optype_d
));
264 if (gimple_ssa_operands (fn
)->operand_memory_index
+ size
265 >= gimple_ssa_operands (fn
)->ssa_operand_mem_size
)
267 struct ssa_operand_memory_d
*ptr
;
269 switch (gimple_ssa_operands (fn
)->ssa_operand_mem_size
)
272 gimple_ssa_operands (fn
)->ssa_operand_mem_size
= OP_SIZE_1
;
275 gimple_ssa_operands (fn
)->ssa_operand_mem_size
= OP_SIZE_2
;
279 gimple_ssa_operands (fn
)->ssa_operand_mem_size
= OP_SIZE_3
;
286 ptr
= (ssa_operand_memory_d
*) ggc_internal_alloc
287 (sizeof (void *) + gimple_ssa_operands (fn
)->ssa_operand_mem_size
);
289 ptr
->next
= gimple_ssa_operands (fn
)->operand_memory
;
290 gimple_ssa_operands (fn
)->operand_memory
= ptr
;
291 gimple_ssa_operands (fn
)->operand_memory_index
= 0;
294 ptr
= &(gimple_ssa_operands (fn
)->operand_memory
295 ->mem
[gimple_ssa_operands (fn
)->operand_memory_index
]);
296 gimple_ssa_operands (fn
)->operand_memory_index
+= size
;
301 /* Allocate a USE operand. */
303 static inline struct use_optype_d
*
304 alloc_use (struct function
*fn
)
306 struct use_optype_d
*ret
;
307 if (gimple_ssa_operands (fn
)->free_uses
)
309 ret
= gimple_ssa_operands (fn
)->free_uses
;
310 gimple_ssa_operands (fn
)->free_uses
311 = gimple_ssa_operands (fn
)->free_uses
->next
;
314 ret
= (struct use_optype_d
*)
315 ssa_operand_alloc (fn
, sizeof (struct use_optype_d
));
320 /* Adds OP to the list of uses of statement STMT after LAST. */
322 static inline use_optype_p
323 add_use_op (struct function
*fn
, gimple stmt
, tree
*op
, use_optype_p last
)
325 use_optype_p new_use
;
327 new_use
= alloc_use (fn
);
328 USE_OP_PTR (new_use
)->use
= op
;
329 link_imm_use_stmt (USE_OP_PTR (new_use
), *op
, stmt
);
330 last
->next
= new_use
;
331 new_use
->next
= NULL
;
337 /* Takes elements from build_defs and turns them into def operands of STMT.
338 TODO -- Make build_defs vec of tree *. */
341 finalize_ssa_defs (struct function
*fn
, gimple stmt
)
343 /* Pre-pend the vdef we may have built. */
344 if (build_vdef
!= NULL_TREE
)
346 tree oldvdef
= gimple_vdef (stmt
);
348 && TREE_CODE (oldvdef
) == SSA_NAME
)
349 oldvdef
= SSA_NAME_VAR (oldvdef
);
350 if (oldvdef
!= build_vdef
)
351 gimple_set_vdef (stmt
, build_vdef
);
354 /* Clear and unlink a no longer necessary VDEF. */
355 if (build_vdef
== NULL_TREE
356 && gimple_vdef (stmt
) != NULL_TREE
)
358 if (TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
360 unlink_stmt_vdef (stmt
);
361 release_ssa_name_fn (fn
, gimple_vdef (stmt
));
363 gimple_set_vdef (stmt
, NULL_TREE
);
366 /* If we have a non-SSA_NAME VDEF, mark it for renaming. */
367 if (gimple_vdef (stmt
)
368 && TREE_CODE (gimple_vdef (stmt
)) != SSA_NAME
)
370 fn
->gimple_df
->rename_vops
= 1;
371 fn
->gimple_df
->ssa_renaming_needed
= 1;
376 /* Takes elements from build_uses and turns them into use operands of STMT.
377 TODO -- Make build_uses vec of tree *. */
380 finalize_ssa_uses (struct function
*fn
, gimple stmt
)
383 struct use_optype_d new_list
;
384 use_optype_p old_ops
, ptr
, last
;
386 /* Pre-pend the VUSE we may have built. */
387 if (build_vuse
!= NULL_TREE
)
389 tree oldvuse
= gimple_vuse (stmt
);
391 && TREE_CODE (oldvuse
) == SSA_NAME
)
392 oldvuse
= SSA_NAME_VAR (oldvuse
);
393 if (oldvuse
!= (build_vuse
!= NULL_TREE
394 ? build_vuse
: build_vdef
))
395 gimple_set_vuse (stmt
, NULL_TREE
);
396 build_uses
.safe_insert (0, (tree
)gimple_vuse_ptr (stmt
));
399 new_list
.next
= NULL
;
402 old_ops
= gimple_use_ops (stmt
);
404 /* Clear a no longer necessary VUSE. */
405 if (build_vuse
== NULL_TREE
406 && gimple_vuse (stmt
) != NULL_TREE
)
407 gimple_set_vuse (stmt
, NULL_TREE
);
409 /* If there is anything in the old list, free it. */
412 for (ptr
= old_ops
; ptr
->next
; ptr
= ptr
->next
)
413 delink_imm_use (USE_OP_PTR (ptr
));
414 delink_imm_use (USE_OP_PTR (ptr
));
415 ptr
->next
= gimple_ssa_operands (fn
)->free_uses
;
416 gimple_ssa_operands (fn
)->free_uses
= old_ops
;
419 /* If we added a VUSE, make sure to set the operand if it is not already
420 present and mark it for renaming. */
421 if (build_vuse
!= NULL_TREE
422 && gimple_vuse (stmt
) == NULL_TREE
)
424 gimple_set_vuse (stmt
, gimple_vop (fn
));
425 fn
->gimple_df
->rename_vops
= 1;
426 fn
->gimple_df
->ssa_renaming_needed
= 1;
429 /* Now create nodes for all the new nodes. */
430 for (new_i
= 0; new_i
< build_uses
.length (); new_i
++)
432 tree
*op
= (tree
*) build_uses
[new_i
];
433 last
= add_use_op (fn
, stmt
, op
, last
);
436 /* Now set the stmt's operands. */
437 gimple_set_use_ops (stmt
, new_list
.next
);
441 /* Clear the in_list bits and empty the build array for VDEFs and
445 cleanup_build_arrays (void)
447 build_vdef
= NULL_TREE
;
448 build_vuse
= NULL_TREE
;
449 build_uses
.truncate (0);
453 /* Finalize all the build vectors, fill the new ones into INFO. */
456 finalize_ssa_stmt_operands (struct function
*fn
, gimple stmt
)
458 finalize_ssa_defs (fn
, stmt
);
459 finalize_ssa_uses (fn
, stmt
);
460 cleanup_build_arrays ();
464 /* Start the process of building up operands vectors in INFO. */
467 start_ssa_stmt_operands (void)
469 gcc_assert (build_uses
.length () == 0);
470 gcc_assert (build_vuse
== NULL_TREE
);
471 gcc_assert (build_vdef
== NULL_TREE
);
475 /* Add USE_P to the list of pointers to operands. */
478 append_use (tree
*use_p
)
480 build_uses
.safe_push ((tree
) use_p
);
484 /* Add VAR to the set of variables that require a VDEF operator. */
487 append_vdef (tree var
)
489 gcc_assert ((build_vdef
== NULL_TREE
490 || build_vdef
== var
)
491 && (build_vuse
== NULL_TREE
492 || build_vuse
== var
));
499 /* Add VAR to the set of variables that require a VUSE operator. */
502 append_vuse (tree var
)
504 gcc_assert (build_vuse
== NULL_TREE
505 || build_vuse
== var
);
510 /* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */
513 add_virtual_operand (struct function
*fn
,
514 gimple stmt ATTRIBUTE_UNUSED
, int flags
)
516 /* Add virtual operands to the stmt, unless the caller has specifically
517 requested not to do that (used when adding operands inside an
518 ADDR_EXPR expression). */
519 if (flags
& opf_no_vops
)
522 gcc_assert (!is_gimple_debug (stmt
));
525 append_vdef (gimple_vop (fn
));
527 append_vuse (gimple_vop (fn
));
531 /* Add *VAR_P to the appropriate operand array for statement STMT.
532 FLAGS is as in get_expr_operands. If *VAR_P is a GIMPLE register,
533 it will be added to the statement's real operands, otherwise it is
534 added to virtual operands. */
537 add_stmt_operand (struct function
*fn
, tree
*var_p
, gimple stmt
, int flags
)
541 gcc_assert (SSA_VAR_P (*var_p
));
543 if (is_gimple_reg (var
))
545 /* The variable is a GIMPLE register. Add it to real operands. */
551 fn
->gimple_df
->ssa_renaming_needed
= 1;
555 /* Mark statements with volatile operands. */
556 if (!(flags
& opf_no_vops
)
557 && TREE_THIS_VOLATILE (var
))
558 gimple_set_has_volatile_ops (stmt
, true);
560 /* The variable is a memory access. Add virtual operands. */
561 add_virtual_operand (fn
, stmt
, flags
);
565 /* Mark the base address of REF as having its address taken.
566 REF may be a single variable whose address has been taken or any
567 other valid GIMPLE memory reference (structure reference, array,
571 mark_address_taken (tree ref
)
575 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
576 as the only thing we take the address of. If VAR is a structure,
577 taking the address of a field means that the whole structure may
578 be referenced using pointer arithmetic. See PR 21407 and the
579 ensuing mailing list discussion. */
580 var
= get_base_address (ref
);
584 TREE_ADDRESSABLE (var
) = 1;
585 else if (TREE_CODE (var
) == MEM_REF
586 && TREE_CODE (TREE_OPERAND (var
, 0)) == ADDR_EXPR
587 && DECL_P (TREE_OPERAND (TREE_OPERAND (var
, 0), 0)))
588 TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var
, 0), 0)) = 1;
593 /* A subroutine of get_expr_operands to handle MEM_REF.
595 STMT is the statement being processed, EXPR is the MEM_REF
598 FLAGS is as in get_expr_operands. */
601 get_mem_ref_operands (struct function
*fn
,
602 gimple stmt
, tree expr
, int flags
)
604 tree
*pptr
= &TREE_OPERAND (expr
, 0);
606 if (!(flags
& opf_no_vops
)
607 && TREE_THIS_VOLATILE (expr
))
608 gimple_set_has_volatile_ops (stmt
, true);
611 add_virtual_operand (fn
, stmt
, flags
);
613 /* If requested, add a USE operand for the base pointer. */
614 get_expr_operands (fn
, stmt
, pptr
,
615 opf_non_addressable
| opf_use
616 | (flags
& (opf_no_vops
|opf_not_non_addressable
)));
620 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
623 get_tmr_operands (struct function
*fn
, gimple stmt
, tree expr
, int flags
)
625 if (!(flags
& opf_no_vops
)
626 && TREE_THIS_VOLATILE (expr
))
627 gimple_set_has_volatile_ops (stmt
, true);
629 /* First record the real operands. */
630 get_expr_operands (fn
, stmt
,
631 &TMR_BASE (expr
), opf_use
| (flags
& opf_no_vops
));
632 get_expr_operands (fn
, stmt
,
633 &TMR_INDEX (expr
), opf_use
| (flags
& opf_no_vops
));
634 get_expr_operands (fn
, stmt
,
635 &TMR_INDEX2 (expr
), opf_use
| (flags
& opf_no_vops
));
637 add_virtual_operand (fn
, stmt
, flags
);
641 /* If STMT is a call that may clobber globals and other symbols that
642 escape, add them to the VDEF/VUSE lists for it. */
645 maybe_add_call_vops (struct function
*fn
, gimple stmt
)
647 int call_flags
= gimple_call_flags (stmt
);
649 /* If aliases have been computed already, add VDEF or VUSE
650 operands for all the symbols that have been found to be
652 if (!(call_flags
& ECF_NOVOPS
))
654 /* A 'pure' or a 'const' function never call-clobbers anything. */
655 if (!(call_flags
& (ECF_PURE
| ECF_CONST
)))
656 add_virtual_operand (fn
, stmt
, opf_def
);
657 else if (!(call_flags
& ECF_CONST
))
658 add_virtual_operand (fn
, stmt
, opf_use
);
663 /* Scan operands in the ASM_EXPR stmt referred to in INFO. */
666 get_asm_stmt_operands (struct function
*fn
, gimple stmt
)
669 const char **oconstraints
;
670 const char *constraint
;
671 bool allows_mem
, allows_reg
, is_inout
;
673 noutputs
= gimple_asm_noutputs (stmt
);
674 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
676 /* Gather all output operands. */
677 for (i
= 0; i
< gimple_asm_noutputs (stmt
); i
++)
679 tree link
= gimple_asm_output_op (stmt
, i
);
680 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
681 oconstraints
[i
] = constraint
;
682 parse_output_constraint (&constraint
, i
, 0, 0, &allows_mem
,
683 &allows_reg
, &is_inout
);
685 /* This should have been split in gimplify_asm_expr. */
686 gcc_assert (!allows_reg
|| !is_inout
);
688 /* Memory operands are addressable. Note that STMT needs the
689 address of this operand. */
690 if (!allows_reg
&& allows_mem
)
691 mark_address_taken (TREE_VALUE (link
));
693 get_expr_operands (fn
, stmt
,
694 &TREE_VALUE (link
), opf_def
| opf_not_non_addressable
);
697 /* Gather all input operands. */
698 for (i
= 0; i
< gimple_asm_ninputs (stmt
); i
++)
700 tree link
= gimple_asm_input_op (stmt
, i
);
701 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
702 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0, oconstraints
,
703 &allows_mem
, &allows_reg
);
705 /* Memory operands are addressable. Note that STMT needs the
706 address of this operand. */
707 if (!allows_reg
&& allows_mem
)
708 mark_address_taken (TREE_VALUE (link
));
710 get_expr_operands (fn
, stmt
, &TREE_VALUE (link
), opf_not_non_addressable
);
713 /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
714 if (gimple_asm_clobbers_memory_p (stmt
))
715 add_virtual_operand (fn
, stmt
, opf_def
);
719 /* Recursively scan the expression pointed to by EXPR_P in statement
720 STMT. FLAGS is one of the OPF_* constants modifying how to
721 interpret the operands found. */
724 get_expr_operands (struct function
*fn
, gimple stmt
, tree
*expr_p
, int flags
)
727 enum tree_code_class codeclass
;
729 int uflags
= opf_use
;
734 if (is_gimple_debug (stmt
))
735 uflags
|= (flags
& opf_no_vops
);
737 code
= TREE_CODE (expr
);
738 codeclass
= TREE_CODE_CLASS (code
);
743 /* Taking the address of a variable does not represent a
744 reference to it, but the fact that the statement takes its
745 address will be of interest to some passes (e.g. alias
747 if ((!(flags
& opf_non_addressable
)
748 || (flags
& opf_not_non_addressable
))
749 && !is_gimple_debug (stmt
))
750 mark_address_taken (TREE_OPERAND (expr
, 0));
752 /* Otherwise, there may be variables referenced inside but there
753 should be no VUSEs created, since the referenced objects are
754 not really accessed. The only operands that we should find
755 here are ARRAY_REF indices which will always be real operands
756 (GIMPLE does not allow non-registers as array indices). */
757 flags
|= opf_no_vops
;
758 get_expr_operands (fn
, stmt
, &TREE_OPERAND (expr
, 0),
759 flags
| opf_not_non_addressable
| opf_address_taken
);
766 if (!(flags
& opf_address_taken
))
767 add_stmt_operand (fn
, expr_p
, stmt
, flags
);
770 case DEBUG_EXPR_DECL
:
771 gcc_assert (gimple_debug_bind_p (stmt
));
775 get_mem_ref_operands (fn
, stmt
, expr
, flags
);
779 get_tmr_operands (fn
, stmt
, expr
, flags
);
783 case ARRAY_RANGE_REF
:
788 if (!(flags
& opf_no_vops
)
789 && TREE_THIS_VOLATILE (expr
))
790 gimple_set_has_volatile_ops (stmt
, true);
792 get_expr_operands (fn
, stmt
, &TREE_OPERAND (expr
, 0), flags
);
794 if (code
== COMPONENT_REF
)
796 if (!(flags
& opf_no_vops
)
797 && TREE_THIS_VOLATILE (TREE_OPERAND (expr
, 1)))
798 gimple_set_has_volatile_ops (stmt
, true);
799 get_expr_operands (fn
, stmt
, &TREE_OPERAND (expr
, 2), uflags
);
801 else if (code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
803 get_expr_operands (fn
, stmt
, &TREE_OPERAND (expr
, 1), uflags
);
804 get_expr_operands (fn
, stmt
, &TREE_OPERAND (expr
, 2), uflags
);
805 get_expr_operands (fn
, stmt
, &TREE_OPERAND (expr
, 3), uflags
);
812 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
813 and an rvalue reference to its second argument. */
814 get_expr_operands (fn
, stmt
, &TREE_OPERAND (expr
, 1), uflags
);
815 get_expr_operands (fn
, stmt
, &TREE_OPERAND (expr
, 0), flags
);
821 get_expr_operands (fn
, stmt
, &TREE_OPERAND (expr
, 0), uflags
);
822 get_expr_operands (fn
, stmt
, &TREE_OPERAND (expr
, 1), uflags
);
823 get_expr_operands (fn
, stmt
, &TREE_OPERAND (expr
, 2), uflags
);
828 /* General aggregate CONSTRUCTORs have been decomposed, but they
829 are still in use as the COMPLEX_EXPR equivalent for vectors. */
831 unsigned HOST_WIDE_INT idx
;
833 /* A volatile constructor is actually TREE_CLOBBER_P, transfer
834 the volatility to the statement, don't use TREE_CLOBBER_P for
835 mirroring the other uses of THIS_VOLATILE in this file. */
836 if (!(flags
& opf_no_vops
)
837 && TREE_THIS_VOLATILE (expr
))
838 gimple_set_has_volatile_ops (stmt
, true);
841 vec_safe_iterate (CONSTRUCTOR_ELTS (expr
), idx
, &ce
);
843 get_expr_operands (fn
, stmt
, &ce
->value
, uflags
);
849 if (!(flags
& opf_no_vops
)
850 && TREE_THIS_VOLATILE (expr
))
851 gimple_set_has_volatile_ops (stmt
, true);
854 case VIEW_CONVERT_EXPR
:
856 get_expr_operands (fn
, stmt
, &TREE_OPERAND (expr
, 0), flags
);
864 get_expr_operands (fn
, stmt
, &TREE_OPERAND (expr
, 0), flags
);
865 get_expr_operands (fn
, stmt
, &TREE_OPERAND (expr
, 1), flags
);
871 case REALIGN_LOAD_EXPR
:
872 case WIDEN_MULT_PLUS_EXPR
:
873 case WIDEN_MULT_MINUS_EXPR
:
876 get_expr_operands (fn
, stmt
, &TREE_OPERAND (expr
, 0), flags
);
877 get_expr_operands (fn
, stmt
, &TREE_OPERAND (expr
, 1), flags
);
878 get_expr_operands (fn
, stmt
, &TREE_OPERAND (expr
, 2), flags
);
885 case CASE_LABEL_EXPR
:
886 /* Expressions that make no memory references. */
890 if (codeclass
== tcc_unary
)
892 if (codeclass
== tcc_binary
|| codeclass
== tcc_comparison
)
894 if (codeclass
== tcc_constant
|| codeclass
== tcc_type
)
898 /* If we get here, something has gone wrong. */
899 #ifdef ENABLE_CHECKING
900 fprintf (stderr
, "unhandled expression in get_expr_operands():\n");
902 fputs ("\n", stderr
);
908 /* Parse STMT looking for operands. When finished, the various
909 build_* operand vectors will have potential operands in them. */
912 parse_ssa_operands (struct function
*fn
, gimple stmt
)
914 enum gimple_code code
= gimple_code (stmt
);
915 size_t i
, n
, start
= 0;
920 get_asm_stmt_operands (fn
, stmt
);
923 case GIMPLE_TRANSACTION
:
924 /* The start of a transaction is a memory barrier. */
925 add_virtual_operand (fn
, stmt
, opf_def
| opf_use
);
929 if (gimple_debug_bind_p (stmt
)
930 && gimple_debug_bind_has_value_p (stmt
))
931 get_expr_operands (fn
, stmt
, gimple_debug_bind_get_value_ptr (stmt
),
932 opf_use
| opf_no_vops
);
936 append_vuse (gimple_vop (fn
));
940 /* Add call-clobbered operands, if needed. */
941 maybe_add_call_vops (fn
, stmt
);
945 get_expr_operands (fn
, stmt
, gimple_op_ptr (stmt
, 0), opf_def
);
951 n
= gimple_num_ops (stmt
);
952 for (i
= start
; i
< n
; i
++)
953 get_expr_operands (fn
, stmt
, gimple_op_ptr (stmt
, i
), opf_use
);
959 /* Create an operands cache for STMT. */
962 build_ssa_operands (struct function
*fn
, gimple stmt
)
964 /* Initially assume that the statement has no volatile operands. */
965 gimple_set_has_volatile_ops (stmt
, false);
967 start_ssa_stmt_operands ();
968 parse_ssa_operands (fn
, stmt
);
969 finalize_ssa_stmt_operands (fn
, stmt
);
972 /* Verifies SSA statement operands. */
975 verify_ssa_operands (struct function
*fn
, gimple stmt
)
982 bool volatile_p
= gimple_has_volatile_ops (stmt
);
984 /* build_ssa_operands w/o finalizing them. */
985 gimple_set_has_volatile_ops (stmt
, false);
986 start_ssa_stmt_operands ();
987 parse_ssa_operands (fn
, stmt
);
989 /* Now verify the built operands are the same as present in STMT. */
990 def
= gimple_vdef (stmt
);
992 && TREE_CODE (def
) == SSA_NAME
)
993 def
= SSA_NAME_VAR (def
);
994 if (build_vdef
!= def
)
996 error ("virtual definition of statement not up-to-date");
999 if (gimple_vdef (stmt
)
1000 && ((def_p
= gimple_vdef_op (stmt
)) == NULL_DEF_OPERAND_P
1001 || DEF_FROM_PTR (def_p
) != gimple_vdef (stmt
)))
1003 error ("virtual def operand missing for stmt");
1007 use
= gimple_vuse (stmt
);
1009 && TREE_CODE (use
) == SSA_NAME
)
1010 use
= SSA_NAME_VAR (use
);
1011 if (build_vuse
!= use
)
1013 error ("virtual use of statement not up-to-date");
1016 if (gimple_vuse (stmt
)
1017 && ((use_p
= gimple_vuse_op (stmt
)) == NULL_USE_OPERAND_P
1018 || USE_FROM_PTR (use_p
) != gimple_vuse (stmt
)))
1020 error ("virtual use operand missing for stmt");
1024 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
)
1026 FOR_EACH_VEC_ELT (build_uses
, i
, use
)
1028 if (use_p
->use
== (tree
*)use
)
1030 build_uses
[i
] = NULL_TREE
;
1034 if (i
== build_uses
.length ())
1036 error ("excess use operand for stmt");
1037 debug_generic_expr (USE_FROM_PTR (use_p
));
1041 FOR_EACH_VEC_ELT (build_uses
, i
, use
)
1042 if (use
!= NULL_TREE
)
1044 error ("use operand missing for stmt");
1045 debug_generic_expr (*(tree
*)use
);
1049 if (gimple_has_volatile_ops (stmt
) != volatile_p
)
1051 error ("stmt volatile flag not up-to-date");
1055 cleanup_build_arrays ();
1060 /* Releases the operands of STMT back to their freelists, and clears
1061 the stmt operand lists. */
1064 free_stmt_operands (struct function
*fn
, gimple stmt
)
1066 use_optype_p uses
= gimple_use_ops (stmt
), last_use
;
1070 for (last_use
= uses
; last_use
->next
; last_use
= last_use
->next
)
1071 delink_imm_use (USE_OP_PTR (last_use
));
1072 delink_imm_use (USE_OP_PTR (last_use
));
1073 last_use
->next
= gimple_ssa_operands (fn
)->free_uses
;
1074 gimple_ssa_operands (fn
)->free_uses
= uses
;
1075 gimple_set_use_ops (stmt
, NULL
);
1078 if (gimple_has_mem_ops (stmt
))
1080 gimple_set_vuse (stmt
, NULL_TREE
);
1081 gimple_set_vdef (stmt
, NULL_TREE
);
1086 /* Get the operands of statement STMT. */
1089 update_stmt_operands (struct function
*fn
, gimple stmt
)
1091 /* If update_stmt_operands is called before SSA is initialized, do
1093 if (!ssa_operands_active (fn
))
1096 timevar_push (TV_TREE_OPS
);
1098 gcc_assert (gimple_modified_p (stmt
));
1099 build_ssa_operands (fn
, stmt
);
1100 gimple_set_modified (stmt
, false);
1102 timevar_pop (TV_TREE_OPS
);
1106 /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is done
1107 to test the validity of the swap operation. */
1110 swap_ssa_operands (gimple stmt
, tree
*exp0
, tree
*exp1
)
1118 /* Attempt to preserve the relative positions of these two operands in
1119 their * respective immediate use lists by adjusting their use pointer
1120 to point to the new operand position. */
1121 use_optype_p use0
, use1
, ptr
;
1124 /* Find the 2 operands in the cache, if they are there. */
1125 for (ptr
= gimple_use_ops (stmt
); ptr
; ptr
= ptr
->next
)
1126 if (USE_OP_PTR (ptr
)->use
== exp0
)
1132 for (ptr
= gimple_use_ops (stmt
); ptr
; ptr
= ptr
->next
)
1133 if (USE_OP_PTR (ptr
)->use
== exp1
)
1139 /* And adjust their location to point to the new position of the
1142 USE_OP_PTR (use0
)->use
= exp1
;
1144 USE_OP_PTR (use1
)->use
= exp0
;
1146 /* Now swap the data. */
1153 /* Scan the immediate_use list for VAR making sure its linked properly.
1154 Return TRUE if there is a problem and emit an error message to F. */
1157 verify_imm_links (FILE *f
, tree var
)
1159 use_operand_p ptr
, prev
, list
;
1162 gcc_assert (TREE_CODE (var
) == SSA_NAME
);
1164 list
= &(SSA_NAME_IMM_USE_NODE (var
));
1165 gcc_assert (list
->use
== NULL
);
1167 if (list
->prev
== NULL
)
1169 gcc_assert (list
->next
== NULL
);
1175 for (ptr
= list
->next
; ptr
!= list
; )
1177 if (prev
!= ptr
->prev
)
1180 if (ptr
->use
== NULL
)
1181 goto error
; /* 2 roots, or SAFE guard node. */
1182 else if (*(ptr
->use
) != var
)
1188 /* Avoid infinite loops. 50,000,000 uses probably indicates a
1190 if (count
++ > 50000000)
1194 /* Verify list in the other direction. */
1196 for (ptr
= list
->prev
; ptr
!= list
; )
1198 if (prev
!= ptr
->next
)
1212 if (ptr
->loc
.stmt
&& gimple_modified_p (ptr
->loc
.stmt
))
1214 fprintf (f
, " STMT MODIFIED. - <%p> ", (void *)ptr
->loc
.stmt
);
1215 print_gimple_stmt (f
, ptr
->loc
.stmt
, 0, TDF_SLIM
);
1217 fprintf (f
, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr
,
1219 print_generic_expr (f
, USE_FROM_PTR (ptr
), TDF_SLIM
);
1225 /* Dump all the immediate uses to FILE. */
1228 dump_immediate_uses_for (FILE *file
, tree var
)
1230 imm_use_iterator iter
;
1231 use_operand_p use_p
;
1233 gcc_assert (var
&& TREE_CODE (var
) == SSA_NAME
);
1235 print_generic_expr (file
, var
, TDF_SLIM
);
1236 fprintf (file
, " : -->");
1237 if (has_zero_uses (var
))
1238 fprintf (file
, " no uses.\n");
1240 if (has_single_use (var
))
1241 fprintf (file
, " single use.\n");
1243 fprintf (file
, "%d uses.\n", num_imm_uses (var
));
1245 FOR_EACH_IMM_USE_FAST (use_p
, iter
, var
)
1247 if (use_p
->loc
.stmt
== NULL
&& use_p
->use
== NULL
)
1248 fprintf (file
, "***end of stmt iterator marker***\n");
1250 if (!is_gimple_reg (USE_FROM_PTR (use_p
)))
1251 print_gimple_stmt (file
, USE_STMT (use_p
), 0, TDF_VOPS
|TDF_MEMSYMS
);
1253 print_gimple_stmt (file
, USE_STMT (use_p
), 0, TDF_SLIM
);
1255 fprintf (file
, "\n");
1259 /* Dump all the immediate uses to FILE. */
1262 dump_immediate_uses (FILE *file
)
1267 fprintf (file
, "Immediate_uses: \n\n");
1268 for (x
= 1; x
< num_ssa_names
; x
++)
1273 dump_immediate_uses_for (file
, var
);
1278 /* Dump def-use edges on stderr. */
1281 debug_immediate_uses (void)
1283 dump_immediate_uses (stderr
);
1287 /* Dump def-use edges on stderr. */
1290 debug_immediate_uses_for (tree var
)
1292 dump_immediate_uses_for (stderr
, var
);
1296 /* Unlink STMTs virtual definition from the IL by propagating its use. */
1299 unlink_stmt_vdef (gimple stmt
)
1301 use_operand_p use_p
;
1302 imm_use_iterator iter
;
1304 tree vdef
= gimple_vdef (stmt
);
1305 tree vuse
= gimple_vuse (stmt
);
1308 || TREE_CODE (vdef
) != SSA_NAME
)
1311 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, vdef
)
1313 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
1314 SET_USE (use_p
, vuse
);
1317 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef
))
1318 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse
) = 1;
1322 /* Return true if the var whose chain of uses starts at PTR has no
1325 has_zero_uses_1 (const ssa_use_operand_t
*head
)
1327 const ssa_use_operand_t
*ptr
;
1329 for (ptr
= head
->next
; ptr
!= head
; ptr
= ptr
->next
)
1330 if (!is_gimple_debug (USE_STMT (ptr
)))
1337 /* Return true if the var whose chain of uses starts at PTR has a
1338 single nondebug use. Set USE_P and STMT to that single nondebug
1339 use, if so, or to NULL otherwise. */
1341 single_imm_use_1 (const ssa_use_operand_t
*head
,
1342 use_operand_p
*use_p
, gimple
*stmt
)
1344 ssa_use_operand_t
*ptr
, *single_use
= 0;
1346 for (ptr
= head
->next
; ptr
!= head
; ptr
= ptr
->next
)
1347 if (!is_gimple_debug (USE_STMT (ptr
)))
1358 *use_p
= single_use
;
1361 *stmt
= single_use
? single_use
->loc
.stmt
: NULL
;