1 /* SSA operands management for trees.
2 Copyright (C) 2003-2022 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
28 #include "gimple-pretty-print.h"
29 #include "diagnostic-core.h"
31 #include "print-tree.h"
35 /* This file contains the code required to manage the operands cache of the
36 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
37 annotation. This cache contains operands that will be of interest to
38 optimizers and other passes wishing to manipulate the IL.
40 The operand type are broken up into REAL and VIRTUAL operands. The real
41 operands are represented as pointers into the stmt's operand tree. Thus
42 any manipulation of the real operands will be reflected in the actual tree.
43 Virtual operands are represented solely in the cache, although the base
44 variable for the SSA_NAME may, or may not occur in the stmt's tree.
45 Manipulation of the virtual operands will not be reflected in the stmt tree.
47 The routines in this file are concerned with creating this operand cache
50 The operand tree is the parsed by the various get_* routines which look
51 through the stmt tree for the occurrence of operands which may be of
52 interest, and calls are made to the append_* routines whenever one is
53 found. There are 4 of these routines, each representing one of the
54 4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs.
56 The append_* routines check for duplication, and simply keep a list of
57 unique objects for each operand type in the build_* extendable vectors.
59 Once the stmt tree is completely parsed, the finalize_ssa_operands()
60 routine is called, which proceeds to perform the finalization routine
61 on each of the 4 operand vectors which have been built up.
63 If the stmt had a previous operand cache, the finalization routines
64 attempt to match up the new operands with the old ones. If it's a perfect
65 match, the old vector is simply reused. If it isn't a perfect match, then
66 a new vector is created and the new operands are placed there. For
67 virtual operands, if the previous cache had SSA_NAME version of a
68 variable, and that same variable occurs in the same operands cache, then
69 the new cache vector will also get the same SSA_NAME.
71 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
72 operand vector for VUSE, then the new vector will also be modified
73 such that it contains 'a_5' rather than 'a'. */
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use		0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def 	(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 1)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

/* Operand is having its address taken.  */
#define opf_address_taken (1 << 5)
102 /* Class containing temporary per-stmt state. */
104 class operands_scanner
107 operands_scanner (struct function
*fun
, gimple
*statement
)
109 build_vuse
= NULL_TREE
;
110 build_vdef
= NULL_TREE
;
115 /* Create an operands cache for STMT. */
116 void build_ssa_operands ();
118 /* Verifies SSA statement operands. */
119 DEBUG_FUNCTION
bool verify_ssa_operands ();
122 /* Disable copy and assign of this class, as it may have problems with
124 DISABLE_COPY_AND_ASSIGN (operands_scanner
);
126 /* Array for building all the use operands. */
127 auto_vec
<tree
*, 16> build_uses
;
129 /* The built VDEF operand. */
132 /* The built VUSE operand. */
135 /* Function which STMT belongs to. */
138 /* Statement to work on. */
141 /* Takes elements from build_uses and turns them into use operands of STMT. */
142 void finalize_ssa_uses ();
144 /* Clear the in_list bits and empty the build array for VDEFs and
146 void cleanup_build_arrays ();
148 /* Finalize all the build vectors, fill the new ones into INFO. */
149 void finalize_ssa_stmt_operands ();
151 /* Start the process of building up operands vectors in INFO. */
152 void start_ssa_stmt_operands ();
154 /* Add USE_P to the list of pointers to operands. */
155 void append_use (tree
*use_p
);
157 /* Add VAR to the set of variables that require a VDEF operator. */
158 void append_vdef (tree var
);
160 /* Add VAR to the set of variables that require a VUSE operator. */
161 void append_vuse (tree var
);
163 /* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */
164 void add_virtual_operand (int flags
);
167 /* Add *VAR_P to the appropriate operand array for statement STMT.
168 FLAGS is as in get_expr_operands. If *VAR_P is a GIMPLE register,
169 it will be added to the statement's real operands, otherwise it is
170 added to virtual operands. */
171 void add_stmt_operand (tree
*var_p
, int flags
);
173 /* A subroutine of get_expr_operands to handle MEM_REF.
175 STMT is the statement being processed, EXPR is the MEM_REF
178 FLAGS is as in get_expr_operands. */
179 void get_mem_ref_operands (tree expr
, int flags
);
181 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
182 void get_tmr_operands (tree expr
, int flags
);
185 /* If STMT is a call that may clobber globals and other symbols that
186 escape, add them to the VDEF/VUSE lists for it. */
187 void maybe_add_call_vops (gcall
*stmt
);
189 /* Scan operands in the ASM_EXPR stmt referred to in INFO. */
190 void get_asm_stmt_operands (gasm
*stmt
);
193 /* Recursively scan the expression pointed to by EXPR_P in statement
194 STMT. FLAGS is one of the OPF_* constants modifying how to
195 interpret the operands found. */
196 void get_expr_operands (tree
*expr_p
, int flags
);
198 /* Parse STMT looking for operands. When finished, the various
199 build_* operand vectors will have potential operands in them. */
200 void parse_ssa_operands ();
203 /* Takes elements from build_defs and turns them into def operands of STMT.
204 TODO -- Make build_defs vec of tree *. */
205 void finalize_ssa_defs ();
208 /* Accessor to tree-ssa-operands.cc caches. */
209 static inline struct ssa_operands
*
210 gimple_ssa_operands (const struct function
*fun
)
212 return &fun
->gimple_df
->ssa_operands
;
216 /* Return true if the SSA operands cache is active. */
219 ssa_operands_active (struct function
*fun
)
224 return fun
->gimple_df
&& gimple_ssa_operands (fun
)->ops_active
;
228 /* Create the VOP variable, an artificial global variable to act as a
229 representative of all of the virtual operands FUD chain. */
232 create_vop_var (struct function
*fn
)
236 gcc_assert (fn
->gimple_df
->vop
== NULL_TREE
);
238 global_var
= build_decl (BUILTINS_LOCATION
, VAR_DECL
,
239 get_identifier (".MEM"),
241 DECL_ARTIFICIAL (global_var
) = 1;
242 DECL_IGNORED_P (global_var
) = 1;
243 TREE_READONLY (global_var
) = 0;
244 DECL_EXTERNAL (global_var
) = 1;
245 TREE_STATIC (global_var
) = 1;
246 TREE_USED (global_var
) = 1;
247 DECL_CONTEXT (global_var
) = NULL_TREE
;
248 TREE_THIS_VOLATILE (global_var
) = 0;
249 TREE_ADDRESSABLE (global_var
) = 0;
250 VAR_DECL_IS_VIRTUAL_OPERAND (global_var
) = 1;
252 fn
->gimple_df
->vop
= global_var
;
/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operands space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_1	(1024 - sizeof (void *))
#define OP_SIZE_2	(1024 * 4 - sizeof (void *))
#define OP_SIZE_3	(1024 * 16 - sizeof (void *))
267 /* Initialize the operand cache routines. */
270 init_ssa_operands (struct function
*fn
)
272 gcc_assert (gimple_ssa_operands (fn
)->operand_memory
== NULL
);
273 gimple_ssa_operands (fn
)->operand_memory_index
274 = gimple_ssa_operands (fn
)->ssa_operand_mem_size
;
275 gimple_ssa_operands (fn
)->ops_active
= true;
276 gimple_ssa_operands (fn
)->ssa_operand_mem_size
= OP_SIZE_INIT
;
281 /* Dispose of anything required by the operand routines. */
284 fini_ssa_operands (struct function
*fn
)
286 struct ssa_operand_memory_d
*ptr
;
288 gimple_ssa_operands (fn
)->free_uses
= NULL
;
290 while ((ptr
= gimple_ssa_operands (fn
)->operand_memory
) != NULL
)
292 gimple_ssa_operands (fn
)->operand_memory
293 = gimple_ssa_operands (fn
)->operand_memory
->next
;
297 gimple_ssa_operands (fn
)->ops_active
= false;
299 fn
->gimple_df
->vop
= NULL_TREE
;
303 /* Return memory for an operand of size SIZE. */
306 ssa_operand_alloc (struct function
*fn
, unsigned size
)
310 gcc_assert (size
== sizeof (struct use_optype_d
));
312 if (gimple_ssa_operands (fn
)->operand_memory_index
+ size
313 >= gimple_ssa_operands (fn
)->ssa_operand_mem_size
)
315 struct ssa_operand_memory_d
*ptr
;
317 switch (gimple_ssa_operands (fn
)->ssa_operand_mem_size
)
320 gimple_ssa_operands (fn
)->ssa_operand_mem_size
= OP_SIZE_1
;
323 gimple_ssa_operands (fn
)->ssa_operand_mem_size
= OP_SIZE_2
;
327 gimple_ssa_operands (fn
)->ssa_operand_mem_size
= OP_SIZE_3
;
334 ptr
= (ssa_operand_memory_d
*) ggc_internal_alloc
335 (sizeof (void *) + gimple_ssa_operands (fn
)->ssa_operand_mem_size
);
337 ptr
->next
= gimple_ssa_operands (fn
)->operand_memory
;
338 gimple_ssa_operands (fn
)->operand_memory
= ptr
;
339 gimple_ssa_operands (fn
)->operand_memory_index
= 0;
342 ptr
= &(gimple_ssa_operands (fn
)->operand_memory
343 ->mem
[gimple_ssa_operands (fn
)->operand_memory_index
]);
344 gimple_ssa_operands (fn
)->operand_memory_index
+= size
;
349 /* Allocate a USE operand. */
351 static inline struct use_optype_d
*
352 alloc_use (struct function
*fn
)
354 struct use_optype_d
*ret
;
355 if (gimple_ssa_operands (fn
)->free_uses
)
357 ret
= gimple_ssa_operands (fn
)->free_uses
;
358 gimple_ssa_operands (fn
)->free_uses
359 = gimple_ssa_operands (fn
)->free_uses
->next
;
362 ret
= (struct use_optype_d
*)
363 ssa_operand_alloc (fn
, sizeof (struct use_optype_d
));
368 /* Adds OP to the list of uses of statement STMT after LAST. */
370 static inline use_optype_p
371 add_use_op (struct function
*fn
, gimple
*stmt
, tree
*op
, use_optype_p last
)
373 use_optype_p new_use
;
375 new_use
= alloc_use (fn
);
376 USE_OP_PTR (new_use
)->use
= op
;
377 link_imm_use_stmt (USE_OP_PTR (new_use
), *op
, stmt
);
378 last
->next
= new_use
;
379 new_use
->next
= NULL
;
385 /* Takes elements from build_defs and turns them into def operands of STMT.
386 TODO -- Make build_defs vec of tree *. */
389 operands_scanner::finalize_ssa_defs ()
391 /* Pre-pend the vdef we may have built. */
392 if (build_vdef
!= NULL_TREE
)
394 tree oldvdef
= gimple_vdef (stmt
);
396 && TREE_CODE (oldvdef
) == SSA_NAME
)
397 oldvdef
= SSA_NAME_VAR (oldvdef
);
398 if (oldvdef
!= build_vdef
)
399 gimple_set_vdef (stmt
, build_vdef
);
402 /* Clear and unlink a no longer necessary VDEF. */
403 if (build_vdef
== NULL_TREE
404 && gimple_vdef (stmt
) != NULL_TREE
)
406 if (TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
408 unlink_stmt_vdef (stmt
);
409 release_ssa_name_fn (fn
, gimple_vdef (stmt
));
411 gimple_set_vdef (stmt
, NULL_TREE
);
414 /* If we have a non-SSA_NAME VDEF, mark it for renaming. */
415 if (gimple_vdef (stmt
)
416 && TREE_CODE (gimple_vdef (stmt
)) != SSA_NAME
)
418 fn
->gimple_df
->rename_vops
= 1;
419 fn
->gimple_df
->ssa_renaming_needed
= 1;
424 /* Takes elements from build_uses and turns them into use operands of STMT. */
427 operands_scanner::finalize_ssa_uses ()
430 struct use_optype_d new_list
;
431 use_optype_p old_ops
, ptr
, last
;
433 /* Pre-pend the VUSE we may have built. */
434 if (build_vuse
!= NULL_TREE
)
436 tree oldvuse
= gimple_vuse (stmt
);
438 && TREE_CODE (oldvuse
) == SSA_NAME
)
439 oldvuse
= SSA_NAME_VAR (oldvuse
);
440 if (oldvuse
!= (build_vuse
!= NULL_TREE
441 ? build_vuse
: build_vdef
))
442 gimple_set_vuse (stmt
, NULL_TREE
);
443 build_uses
.safe_insert (0, gimple_vuse_ptr (stmt
));
446 new_list
.next
= NULL
;
449 old_ops
= gimple_use_ops (stmt
);
451 /* Clear a no longer necessary VUSE. */
452 if (build_vuse
== NULL_TREE
453 && gimple_vuse (stmt
) != NULL_TREE
)
454 gimple_set_vuse (stmt
, NULL_TREE
);
456 /* If there is anything in the old list, free it. */
459 for (ptr
= old_ops
; ptr
->next
; ptr
= ptr
->next
)
460 delink_imm_use (USE_OP_PTR (ptr
));
461 delink_imm_use (USE_OP_PTR (ptr
));
462 ptr
->next
= gimple_ssa_operands (fn
)->free_uses
;
463 gimple_ssa_operands (fn
)->free_uses
= old_ops
;
466 /* If we added a VUSE, make sure to set the operand if it is not already
467 present and mark it for renaming. */
468 if (build_vuse
!= NULL_TREE
469 && gimple_vuse (stmt
) == NULL_TREE
)
471 gimple_set_vuse (stmt
, gimple_vop (fn
));
472 fn
->gimple_df
->rename_vops
= 1;
473 fn
->gimple_df
->ssa_renaming_needed
= 1;
476 /* Now create nodes for all the new nodes. */
477 for (new_i
= 0; new_i
< build_uses
.length (); new_i
++)
479 tree
*op
= build_uses
[new_i
];
480 last
= add_use_op (fn
, stmt
, op
, last
);
483 /* Now set the stmt's operands. */
484 gimple_set_use_ops (stmt
, new_list
.next
);
488 /* Clear the in_list bits and empty the build array for VDEFs and
492 operands_scanner::cleanup_build_arrays ()
494 build_vdef
= NULL_TREE
;
495 build_vuse
= NULL_TREE
;
496 build_uses
.truncate (0);
500 /* Finalize all the build vectors, fill the new ones into INFO. */
503 operands_scanner::finalize_ssa_stmt_operands ()
505 finalize_ssa_defs ();
506 finalize_ssa_uses ();
507 cleanup_build_arrays ();
511 /* Start the process of building up operands vectors in INFO. */
514 operands_scanner::start_ssa_stmt_operands ()
516 gcc_assert (build_uses
.length () == 0);
517 gcc_assert (build_vuse
== NULL_TREE
);
518 gcc_assert (build_vdef
== NULL_TREE
);
522 /* Add USE_P to the list of pointers to operands. */
525 operands_scanner::append_use (tree
*use_p
)
527 build_uses
.safe_push (use_p
);
531 /* Add VAR to the set of variables that require a VDEF operator. */
534 operands_scanner::append_vdef (tree var
)
536 gcc_assert ((build_vdef
== NULL_TREE
537 || build_vdef
== var
)
538 && (build_vuse
== NULL_TREE
539 || build_vuse
== var
));
546 /* Add VAR to the set of variables that require a VUSE operator. */
549 operands_scanner::append_vuse (tree var
)
551 gcc_assert (build_vuse
== NULL_TREE
552 || build_vuse
== var
);
557 /* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */
560 operands_scanner::add_virtual_operand (int flags
)
562 /* Add virtual operands to the stmt, unless the caller has specifically
563 requested not to do that (used when adding operands inside an
564 ADDR_EXPR expression). */
565 if (flags
& opf_no_vops
)
568 gcc_assert (!is_gimple_debug (stmt
));
571 append_vdef (gimple_vop (fn
));
573 append_vuse (gimple_vop (fn
));
577 /* Add *VAR_P to the appropriate operand array for statement STMT.
578 FLAGS is as in get_expr_operands. If *VAR_P is a GIMPLE register,
579 it will be added to the statement's real operands, otherwise it is
580 added to virtual operands. */
583 operands_scanner::add_stmt_operand (tree
*var_p
, int flags
)
587 gcc_assert (SSA_VAR_P (*var_p
)
588 || TREE_CODE (*var_p
) == STRING_CST
589 || TREE_CODE (*var_p
) == CONST_DECL
);
591 if (is_gimple_reg (var
))
593 /* The variable is a GIMPLE register. Add it to real operands. */
599 fn
->gimple_df
->ssa_renaming_needed
= 1;
603 /* Mark statements with volatile operands. */
604 if (!(flags
& opf_no_vops
)
605 && TREE_THIS_VOLATILE (var
))
606 gimple_set_has_volatile_ops (stmt
, true);
608 /* The variable is a memory access. Add virtual operands. */
609 add_virtual_operand (flags
);
613 /* Mark the base address of REF as having its address taken.
614 REF may be a single variable whose address has been taken or any
615 other valid GIMPLE memory reference (structure reference, array,
619 mark_address_taken (tree ref
)
623 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
624 as the only thing we take the address of. If VAR is a structure,
625 taking the address of a field means that the whole structure may
626 be referenced using pointer arithmetic. See PR 21407 and the
627 ensuing mailing list discussion. */
628 var
= get_base_address (ref
);
630 || TREE_CODE (var
) == RESULT_DECL
631 || TREE_CODE (var
) == PARM_DECL
)
632 TREE_ADDRESSABLE (var
) = 1;
636 /* A subroutine of get_expr_operands to handle MEM_REF.
638 STMT is the statement being processed, EXPR is the MEM_REF
641 FLAGS is as in get_expr_operands. */
644 operands_scanner::get_mem_ref_operands (tree expr
, int flags
)
646 tree
*pptr
= &TREE_OPERAND (expr
, 0);
648 if (!(flags
& opf_no_vops
)
649 && TREE_THIS_VOLATILE (expr
))
650 gimple_set_has_volatile_ops (stmt
, true);
653 add_virtual_operand (flags
);
655 /* If requested, add a USE operand for the base pointer. */
656 get_expr_operands (pptr
,
657 opf_non_addressable
| opf_use
658 | (flags
& (opf_no_vops
|opf_not_non_addressable
)));
662 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
665 operands_scanner::get_tmr_operands(tree expr
, int flags
)
667 if (!(flags
& opf_no_vops
)
668 && TREE_THIS_VOLATILE (expr
))
669 gimple_set_has_volatile_ops (stmt
, true);
671 /* First record the real operands. */
672 get_expr_operands (&TMR_BASE (expr
),
673 opf_non_addressable
| opf_use
674 | (flags
& (opf_no_vops
|opf_not_non_addressable
)));
675 get_expr_operands (&TMR_INDEX (expr
), opf_use
| (flags
& opf_no_vops
));
676 get_expr_operands (&TMR_INDEX2 (expr
), opf_use
| (flags
& opf_no_vops
));
678 add_virtual_operand (flags
);
682 /* If STMT is a call that may clobber globals and other symbols that
683 escape, add them to the VDEF/VUSE lists for it. */
686 operands_scanner::maybe_add_call_vops (gcall
*stmt
)
688 int call_flags
= gimple_call_flags (stmt
);
690 /* If aliases have been computed already, add VDEF or VUSE
691 operands for all the symbols that have been found to be
693 if (!(call_flags
& ECF_NOVOPS
))
695 /* A 'pure' or a 'const' function never call-clobbers anything. */
696 if (!(call_flags
& (ECF_PURE
| ECF_CONST
)))
697 add_virtual_operand (opf_def
);
698 else if (!(call_flags
& ECF_CONST
))
699 add_virtual_operand (opf_use
);
704 /* Scan operands in the ASM_EXPR stmt referred to in INFO. */
707 operands_scanner::get_asm_stmt_operands (gasm
*stmt
)
710 const char **oconstraints
;
711 const char *constraint
;
712 bool allows_mem
, allows_reg
, is_inout
;
714 noutputs
= gimple_asm_noutputs (stmt
);
715 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
717 /* Gather all output operands. */
718 for (i
= 0; i
< gimple_asm_noutputs (stmt
); i
++)
720 tree link
= gimple_asm_output_op (stmt
, i
);
721 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
722 oconstraints
[i
] = constraint
;
723 parse_output_constraint (&constraint
, i
, 0, 0, &allows_mem
,
724 &allows_reg
, &is_inout
);
726 /* This should have been split in gimplify_asm_expr. */
727 gcc_assert (!allows_reg
|| !is_inout
);
729 /* Memory operands are addressable. Note that STMT needs the
730 address of this operand. */
731 if (!allows_reg
&& allows_mem
)
732 mark_address_taken (TREE_VALUE (link
));
734 get_expr_operands (&TREE_VALUE (link
), opf_def
| opf_not_non_addressable
);
737 /* Gather all input operands. */
738 for (i
= 0; i
< gimple_asm_ninputs (stmt
); i
++)
740 tree link
= gimple_asm_input_op (stmt
, i
);
741 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
742 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0, oconstraints
,
743 &allows_mem
, &allows_reg
);
745 /* Memory operands are addressable. Note that STMT needs the
746 address of this operand. */
747 if (!allows_reg
&& allows_mem
)
748 mark_address_taken (TREE_VALUE (link
));
750 get_expr_operands (&TREE_VALUE (link
), opf_not_non_addressable
);
753 /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
754 if (gimple_asm_clobbers_memory_p (stmt
))
755 add_virtual_operand (opf_def
);
759 /* Recursively scan the expression pointed to by EXPR_P in statement
760 STMT. FLAGS is one of the OPF_* constants modifying how to
761 interpret the operands found. */
764 operands_scanner::get_expr_operands (tree
*expr_p
, int flags
)
767 enum tree_code_class codeclass
;
769 int uflags
= opf_use
;
774 if (is_gimple_debug (stmt
))
775 uflags
|= (flags
& opf_no_vops
);
777 code
= TREE_CODE (expr
);
778 codeclass
= TREE_CODE_CLASS (code
);
783 /* Taking the address of a variable does not represent a
784 reference to it, but the fact that the statement takes its
785 address will be of interest to some passes (e.g. alias
787 if ((!(flags
& opf_non_addressable
)
788 || (flags
& opf_not_non_addressable
))
789 && !is_gimple_debug (stmt
))
790 mark_address_taken (TREE_OPERAND (expr
, 0));
792 /* Otherwise, there may be variables referenced inside but there
793 should be no VUSEs created, since the referenced objects are
794 not really accessed. The only operands that we should find
795 here are ARRAY_REF indices which will always be real operands
796 (GIMPLE does not allow non-registers as array indices). */
797 flags
|= opf_no_vops
;
798 get_expr_operands (&TREE_OPERAND (expr
, 0),
799 flags
| opf_not_non_addressable
| opf_address_taken
);
808 if (!(flags
& opf_address_taken
))
809 add_stmt_operand (expr_p
, flags
);
812 case DEBUG_EXPR_DECL
:
813 gcc_assert (gimple_debug_bind_p (stmt
));
817 get_mem_ref_operands (expr
, flags
);
821 get_tmr_operands (expr
, flags
);
825 case ARRAY_RANGE_REF
:
830 if (!(flags
& opf_no_vops
)
831 && TREE_THIS_VOLATILE (expr
))
832 gimple_set_has_volatile_ops (stmt
, true);
834 get_expr_operands (&TREE_OPERAND (expr
, 0), flags
);
836 if (code
== COMPONENT_REF
)
837 get_expr_operands (&TREE_OPERAND (expr
, 2), uflags
);
838 else if (code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
840 get_expr_operands (&TREE_OPERAND (expr
, 1), uflags
);
841 get_expr_operands (&TREE_OPERAND (expr
, 2), uflags
);
842 get_expr_operands (&TREE_OPERAND (expr
, 3), uflags
);
849 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
850 and an rvalue reference to its second argument. */
851 get_expr_operands (&TREE_OPERAND (expr
, 1), uflags
);
852 get_expr_operands (&TREE_OPERAND (expr
, 0), flags
);
858 get_expr_operands (&TREE_OPERAND (expr
, 0), uflags
);
859 get_expr_operands (&TREE_OPERAND (expr
, 1), uflags
);
860 get_expr_operands (&TREE_OPERAND (expr
, 2), uflags
);
865 /* General aggregate CONSTRUCTORs have been decomposed, but they
866 are still in use as the COMPLEX_EXPR equivalent for vectors. */
868 unsigned HOST_WIDE_INT idx
;
870 /* A volatile constructor is actually TREE_CLOBBER_P, transfer
871 the volatility to the statement, don't use TREE_CLOBBER_P for
872 mirroring the other uses of THIS_VOLATILE in this file. */
873 if (!(flags
& opf_no_vops
)
874 && TREE_THIS_VOLATILE (expr
))
875 gimple_set_has_volatile_ops (stmt
, true);
878 vec_safe_iterate (CONSTRUCTOR_ELTS (expr
), idx
, &ce
);
880 get_expr_operands (&ce
->value
, uflags
);
886 if (!(flags
& opf_no_vops
)
887 && TREE_THIS_VOLATILE (expr
))
888 gimple_set_has_volatile_ops (stmt
, true);
891 case VIEW_CONVERT_EXPR
:
893 get_expr_operands (&TREE_OPERAND (expr
, 0), flags
);
896 case BIT_INSERT_EXPR
:
902 get_expr_operands (&TREE_OPERAND (expr
, 0), flags
);
903 get_expr_operands (&TREE_OPERAND (expr
, 1), flags
);
909 case REALIGN_LOAD_EXPR
:
910 case WIDEN_MULT_PLUS_EXPR
:
911 case WIDEN_MULT_MINUS_EXPR
:
913 get_expr_operands (&TREE_OPERAND (expr
, 0), flags
);
914 get_expr_operands (&TREE_OPERAND (expr
, 1), flags
);
915 get_expr_operands (&TREE_OPERAND (expr
, 2), flags
);
921 case CASE_LABEL_EXPR
:
922 /* Expressions that make no memory references. */
926 if (codeclass
== tcc_unary
)
928 if (codeclass
== tcc_binary
|| codeclass
== tcc_comparison
)
930 if (codeclass
== tcc_constant
|| codeclass
== tcc_type
)
934 /* If we get here, something has gone wrong. */
937 fprintf (stderr
, "unhandled expression in get_expr_operands():\n");
939 fputs ("\n", stderr
);
945 /* Parse STMT looking for operands. When finished, the various
946 build_* operand vectors will have potential operands in them. */
949 operands_scanner::parse_ssa_operands ()
951 enum gimple_code code
= gimple_code (stmt
);
952 size_t i
, n
, start
= 0;
957 get_asm_stmt_operands (as_a
<gasm
*> (stmt
));
960 case GIMPLE_TRANSACTION
:
961 /* The start of a transaction is a memory barrier. */
962 add_virtual_operand (opf_def
| opf_use
);
966 if (gimple_debug_bind_p (stmt
)
967 && gimple_debug_bind_has_value_p (stmt
))
968 get_expr_operands (gimple_debug_bind_get_value_ptr (stmt
),
969 opf_use
| opf_no_vops
);
973 append_vuse (gimple_vop (fn
));
977 /* Add call-clobbered operands, if needed. */
978 maybe_add_call_vops (as_a
<gcall
*> (stmt
));
982 get_expr_operands (gimple_op_ptr (stmt
, 0), opf_def
);
988 n
= gimple_num_ops (stmt
);
989 for (i
= start
; i
< n
; i
++)
990 get_expr_operands (gimple_op_ptr (stmt
, i
), opf_use
);
996 /* Create an operands cache for STMT. */
999 operands_scanner::build_ssa_operands ()
1001 /* Initially assume that the statement has no volatile operands. */
1002 gimple_set_has_volatile_ops (stmt
, false);
1004 start_ssa_stmt_operands ();
1005 parse_ssa_operands ();
1006 finalize_ssa_stmt_operands ();
1009 /* Verifies SSA statement operands. */
1012 operands_scanner::verify_ssa_operands ()
1014 use_operand_p use_p
;
1015 def_operand_p def_p
;
1019 bool volatile_p
= gimple_has_volatile_ops (stmt
);
1021 /* build_ssa_operands w/o finalizing them. */
1022 gimple_set_has_volatile_ops (stmt
, false);
1023 start_ssa_stmt_operands ();
1024 parse_ssa_operands ();
1026 /* Now verify the built operands are the same as present in STMT. */
1027 def
= gimple_vdef (stmt
);
1029 && TREE_CODE (def
) == SSA_NAME
)
1030 def
= SSA_NAME_VAR (def
);
1031 if (build_vdef
!= def
)
1033 error ("virtual definition of statement not up to date");
1036 if (gimple_vdef (stmt
)
1037 && ((def_p
= gimple_vdef_op (stmt
)) == NULL_DEF_OPERAND_P
1038 || DEF_FROM_PTR (def_p
) != gimple_vdef (stmt
)))
1040 error ("virtual def operand missing for statement");
1044 tree use
= gimple_vuse (stmt
);
1046 && TREE_CODE (use
) == SSA_NAME
)
1047 use
= SSA_NAME_VAR (use
);
1048 if (build_vuse
!= use
)
1050 error ("virtual use of statement not up to date");
1053 if (gimple_vuse (stmt
)
1054 && ((use_p
= gimple_vuse_op (stmt
)) == NULL_USE_OPERAND_P
1055 || USE_FROM_PTR (use_p
) != gimple_vuse (stmt
)))
1057 error ("virtual use operand missing for statement");
1061 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
)
1064 FOR_EACH_VEC_ELT (build_uses
, i
, op
)
1066 if (use_p
->use
== op
)
1068 build_uses
[i
] = NULL
;
1072 if (i
== build_uses
.length ())
1074 error ("excess use operand for statement");
1075 debug_generic_expr (USE_FROM_PTR (use_p
));
1081 FOR_EACH_VEC_ELT (build_uses
, i
, op
)
1084 error ("use operand missing for statement");
1085 debug_generic_expr (*op
);
1089 if (gimple_has_volatile_ops (stmt
) != volatile_p
)
1091 error ("statement volatile flag not up to date");
1095 cleanup_build_arrays ();
1099 /* Interface for external use. */
1102 verify_ssa_operands (struct function
*fn
, gimple
*stmt
)
1104 return operands_scanner (fn
, stmt
).verify_ssa_operands ();
1108 /* Releases the operands of STMT back to their freelists, and clears
1109 the stmt operand lists. */
1112 free_stmt_operands (struct function
*fn
, gimple
*stmt
)
1114 use_optype_p uses
= gimple_use_ops (stmt
), last_use
;
1118 for (last_use
= uses
; last_use
->next
; last_use
= last_use
->next
)
1119 delink_imm_use (USE_OP_PTR (last_use
));
1120 delink_imm_use (USE_OP_PTR (last_use
));
1121 last_use
->next
= gimple_ssa_operands (fn
)->free_uses
;
1122 gimple_ssa_operands (fn
)->free_uses
= uses
;
1123 gimple_set_use_ops (stmt
, NULL
);
1126 if (gimple_has_mem_ops (stmt
))
1128 gimple_set_vuse (stmt
, NULL_TREE
);
1129 gimple_set_vdef (stmt
, NULL_TREE
);
1134 /* Get the operands of statement STMT. */
1137 update_stmt_operands (struct function
*fn
, gimple
*stmt
)
1139 /* If update_stmt_operands is called before SSA is initialized, do
1141 if (!ssa_operands_active (fn
))
1144 timevar_push (TV_TREE_OPS
);
1146 gcc_assert (gimple_modified_p (stmt
));
1147 operands_scanner (fn
, stmt
).build_ssa_operands ();
1148 gimple_set_modified (stmt
, false);
1150 timevar_pop (TV_TREE_OPS
);
1154 /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is done
1155 to test the validity of the swap operation. */
1158 swap_ssa_operands (gimple
*stmt
, tree
*exp0
, tree
*exp1
)
1166 /* Attempt to preserve the relative positions of these two operands in
1167 their * respective immediate use lists by adjusting their use pointer
1168 to point to the new operand position. */
1169 use_optype_p use0
, use1
, ptr
;
1172 /* Find the 2 operands in the cache, if they are there. */
1173 for (ptr
= gimple_use_ops (stmt
); ptr
; ptr
= ptr
->next
)
1174 if (USE_OP_PTR (ptr
)->use
== exp0
)
1180 for (ptr
= gimple_use_ops (stmt
); ptr
; ptr
= ptr
->next
)
1181 if (USE_OP_PTR (ptr
)->use
== exp1
)
1187 /* And adjust their location to point to the new position of the
1190 USE_OP_PTR (use0
)->use
= exp1
;
1192 USE_OP_PTR (use1
)->use
= exp0
;
1194 /* Now swap the data. */
1201 /* Scan the immediate_use list for VAR making sure its linked properly.
1202 Return TRUE if there is a problem and emit an error message to F. */
1205 verify_imm_links (FILE *f
, tree var
)
1207 use_operand_p ptr
, prev
, list
;
1210 gcc_assert (TREE_CODE (var
) == SSA_NAME
);
1212 list
= &(SSA_NAME_IMM_USE_NODE (var
));
1213 gcc_assert (list
->use
== NULL
);
1215 if (list
->prev
== NULL
)
1217 gcc_assert (list
->next
== NULL
);
1223 for (ptr
= list
->next
; ptr
!= list
; )
1225 if (prev
!= ptr
->prev
)
1227 fprintf (f
, "prev != ptr->prev\n");
1231 if (ptr
->use
== NULL
)
1233 fprintf (f
, "ptr->use == NULL\n");
1234 goto error
; /* 2 roots, or SAFE guard node. */
1236 else if (*(ptr
->use
) != var
)
1238 fprintf (f
, "*(ptr->use) != var\n");
1248 fprintf (f
, "number of immediate uses doesn't fit unsigned int\n");
1253 /* Verify list in the other direction. */
1255 for (ptr
= list
->prev
; ptr
!= list
; )
1257 if (prev
!= ptr
->next
)
1259 fprintf (f
, "prev != ptr->next\n");
1266 fprintf (f
, "count-- < 0\n");
1274 fprintf (f
, "count != 0\n");
1281 if (ptr
->loc
.stmt
&& gimple_modified_p (ptr
->loc
.stmt
))
1283 fprintf (f
, " STMT MODIFIED. - <%p> ", (void *)ptr
->loc
.stmt
);
1284 print_gimple_stmt (f
, ptr
->loc
.stmt
, 0, TDF_SLIM
);
1286 fprintf (f
, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr
,
1288 print_generic_expr (f
, USE_FROM_PTR (ptr
), TDF_SLIM
);
1294 /* Dump all the immediate uses to FILE. */
1297 dump_immediate_uses_for (FILE *file
, tree var
)
1299 imm_use_iterator iter
;
1300 use_operand_p use_p
;
1302 gcc_assert (var
&& TREE_CODE (var
) == SSA_NAME
);
1304 print_generic_expr (file
, var
, TDF_SLIM
);
1305 fprintf (file
, " : -->");
1306 if (has_zero_uses (var
))
1307 fprintf (file
, " no uses.\n");
1309 if (has_single_use (var
))
1310 fprintf (file
, " single use.\n");
1312 fprintf (file
, "%d uses.\n", num_imm_uses (var
));
1314 FOR_EACH_IMM_USE_FAST (use_p
, iter
, var
)
1316 if (use_p
->loc
.stmt
== NULL
&& use_p
->use
== NULL
)
1317 fprintf (file
, "***end of stmt iterator marker***\n");
1319 if (!is_gimple_reg (USE_FROM_PTR (use_p
)))
1320 print_gimple_stmt (file
, USE_STMT (use_p
), 0, TDF_VOPS
|TDF_MEMSYMS
);
1322 print_gimple_stmt (file
, USE_STMT (use_p
), 0, TDF_SLIM
);
1324 fprintf (file
, "\n");
1328 /* Dump all the immediate uses to FILE. */
1331 dump_immediate_uses (FILE *file
)
1336 fprintf (file
, "Immediate_uses: \n\n");
1337 FOR_EACH_SSA_NAME (x
, var
, cfun
)
1339 dump_immediate_uses_for (file
, var
);
1344 /* Dump def-use edges on stderr. */
1347 debug_immediate_uses (void)
1349 dump_immediate_uses (stderr
);
1353 /* Dump def-use edges on stderr. */
1356 debug_immediate_uses_for (tree var
)
1358 dump_immediate_uses_for (stderr
, var
);
1362 /* Unlink STMTs virtual definition from the IL by propagating its use. */
1365 unlink_stmt_vdef (gimple
*stmt
)
1367 use_operand_p use_p
;
1368 imm_use_iterator iter
;
1370 tree vdef
= gimple_vdef (stmt
);
1371 tree vuse
= gimple_vuse (stmt
);
1374 || TREE_CODE (vdef
) != SSA_NAME
)
1377 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, vdef
)
1379 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
1380 SET_USE (use_p
, vuse
);
1383 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef
))
1384 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse
) = 1;
1387 /* Return true if the var whose chain of uses starts at PTR has a
1388 single nondebug use. Set USE_P and STMT to that single nondebug
1389 use, if so, or to NULL otherwise. */
1391 single_imm_use_1 (const ssa_use_operand_t
*head
,
1392 use_operand_p
*use_p
, gimple
**stmt
)
1394 ssa_use_operand_t
*ptr
, *single_use
= 0;
1396 for (ptr
= head
->next
; ptr
!= head
; ptr
= ptr
->next
)
1397 if (USE_STMT(ptr
) && !is_gimple_debug (USE_STMT (ptr
)))
1408 *use_p
= single_use
;
1411 *stmt
= single_use
? single_use
->loc
.stmt
: NULL
;