/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"

/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   I.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */

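/* A typical consumer of this cache walks the finalized operands through
   the iterators layered on top of it (declared in tree-ssa-operands.h).
   For instance, a sketch of visiting every virtual definition of a
   statement, where process () stands in for a pass's own logic:

     tree var;
     ssa_op_iter iter;
     FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter,
                                SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF)
       process (var);  */
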
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none        0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def      (1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def    (1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops     (1 << 2)

/* Operand is a "non-specific" kill for call-clobbers and such.  This is used
   to distinguish "reset the world" events from explicit MODIFY_EXPRs.  */
#define opf_non_specific  (1 << 3)

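/* As an example of how these flags combine: when parse_ssa_operands
   (below) walks 'a = b', the RHS is scanned with opf_none (a plain load
   of 'b') and the LHS with opf_is_def, additionally or'ing in
   opf_kill_def when the entire object is overwritten rather than only a
   part of it.  */
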
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Array for building all the v_may_def operands.  */
static VEC(tree,heap) *build_v_may_defs;

/* Array for building all the vuse operands.  */
static VEC(tree,heap) *build_vuses;

/* Array for building all the v_must_def operands.  */
static VEC(tree,heap) *build_v_must_defs;

/* True if the operands for call clobbered vars are cached and valid.  */
bool ssa_call_clobbered_cache_valid;
bool ssa_ro_call_cache_valid;

/* These arrays are the cached operand vectors for call clobbered calls.  */
static VEC(tree,heap) *clobbered_v_may_defs;
static VEC(tree,heap) *clobbered_vuses;
static VEC(tree,heap) *ro_call_vuses;
static bool ops_active = false;

static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
static unsigned operand_memory_index;

static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
static void get_tmr_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
static void append_v_may_def (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree, tree);
static void add_call_read_ops (tree);
static void add_stmt_operand (tree *, stmt_ann_t, int);
static void build_ssa_operands (tree stmt);

static def_optype_p free_defs = NULL;
static use_optype_p free_uses = NULL;
static vuse_optype_p free_vuses = NULL;
static maydef_optype_p free_maydefs = NULL;
static mustdef_optype_p free_mustdefs = NULL;

/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}

/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *)p);
  tree e2 = *((const tree *)q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}

/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);
  if (num < 2)
    return;
  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
          > get_name_decl (VEC_index (tree, list, 1)))
        {
          /* Swap elements if in the wrong order.  */
          tree tmp = VEC_index (tree, list, 0);
          VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
          VEC_replace (tree, list, 1, tmp);
        }
      return;
    }
  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
         VEC_length (tree, list),
         sizeof (tree),
         operand_build_cmp);
}

/* Return true if the ssa operands cache is active.  */

bool
ssa_operands_active (void)
{
  return ops_active;
}

/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  build_defs = VEC_alloc (tree, heap, 5);
  build_uses = VEC_alloc (tree, heap, 10);
  build_vuses = VEC_alloc (tree, heap, 25);
  build_v_may_defs = VEC_alloc (tree, heap, 25);
  build_v_must_defs = VEC_alloc (tree, heap, 25);

  gcc_assert (operand_memory == NULL);
  operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
  ops_active = true;
}

/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  VEC_free (tree, heap, build_defs);
  VEC_free (tree, heap, build_uses);
  VEC_free (tree, heap, build_v_must_defs);
  VEC_free (tree, heap, build_v_may_defs);
  VEC_free (tree, heap, build_vuses);
  free_defs = NULL;
  free_uses = NULL;
  free_vuses = NULL;
  free_maydefs = NULL;
  free_mustdefs = NULL;
  while ((ptr = operand_memory) != NULL)
    {
      operand_memory = operand_memory->next;
      ggc_free (ptr);
    }

  VEC_free (tree, heap, clobbered_v_may_defs);
  VEC_free (tree, heap, clobbered_vuses);
  VEC_free (tree, heap, ro_call_vuses);
  ops_active = false;
}

/* Return memory for operands of SIZE chunks.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;
  if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
    {
      struct ssa_operand_memory_d *ptr;
      ptr = GGC_NEW (struct ssa_operand_memory_d);
      ptr->next = operand_memory;
      operand_memory = ptr;
      operand_memory_index = 0;
    }
  ptr = &(operand_memory->mem[operand_memory_index]);
  operand_memory_index += size;
  return ptr;
}

/* Make sure PTR is in the correct immediate use list.  Since uses are simply
   pointers into the stmt TREE, there is no way of telling if anyone has
   changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
   The contents are different, but the pointer is still the same.  This
   routine will check to make sure PTR is in the correct list, and if it isn't
   put it in the correct list.  We cannot simply check the previous node
   because all nodes in the same stmt might have been changed.  */

static inline void
correct_use_link (use_operand_p ptr, tree stmt)
{
  use_operand_p prev;
  tree root;

  /* Fold_stmt () may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  prev = ptr->prev;
  if (prev)
    {
      /* Find the root element, making sure we skip any safe iterators.  */
      while (prev->use != NULL || prev->stmt == NULL)
        prev = prev->prev;

      /* Get the ssa_name of the list the node is in.  */
      root = prev->stmt;
      /* If it's the right list, simply return.  */
      if (root == *(ptr->use))
        return;
    }
  /* It's in the wrong list if we reach here.  */
  delink_imm_use (ptr);
  link_imm_use (ptr, *(ptr->use));
}

/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  Virtual uses do not need
   the overhead of correct_use_link since they cannot be directly manipulated
   like a real use can be.  (They don't exist in the TREE_OPERAND nodes.)  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* Fold_stmt () may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}

#define FINALIZE_OPBUILD build_defs
#define FINALIZE_OPBUILD_BASE(I)   (tree *)VEC_index (tree,     \
                                                      build_defs, (I))
#define FINALIZE_OPBUILD_ELEM(I)   (tree *)VEC_index (tree,     \
                                                      build_defs, (I))
#define FINALIZE_FUNC              finalize_ssa_def_ops
#define FINALIZE_ALLOC             alloc_def
#define FINALIZE_FREE              free_defs
#define FINALIZE_TYPE              struct def_optype_d
#define FINALIZE_ELEM(PTR)         ((PTR)->def_ptr)
#define FINALIZE_OPS               DEF_OPS
#define FINALIZE_BASE(VAR)         VAR
#define FINALIZE_BASE_TYPE         tree *
#define FINALIZE_BASE_ZERO         NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)   FINALIZE_ELEM (PTR) = (VAL)
#include "tree-ssa-opfinalize.h"

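/* tree-ssa-opfinalize.h expands the FINALIZE_* macros defined just above
   into a type-specific finalization routine (here, finalize_ssa_def_ops);
   each operand type below defines its own set of these macros before
   including the header again.  */
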
/* This routine will create stmt operands for STMT from the def build list.  */

static void
finalize_ssa_defs (tree stmt)
{
  unsigned int num = VEC_length (tree, build_defs);
  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);

  /* If there is an old list, often the new list is identical, or close, so
     find the elements at the beginning that are the same as the vector.  */

  finalize_ssa_def_ops (stmt);
  VEC_truncate (tree, build_defs, 0);
}

#define FINALIZE_OPBUILD build_uses
#define FINALIZE_OPBUILD_BASE(I)   (tree *)VEC_index (tree,     \
                                                      build_uses, (I))
#define FINALIZE_OPBUILD_ELEM(I)   (tree *)VEC_index (tree,     \
                                                      build_uses, (I))
#define FINALIZE_FUNC              finalize_ssa_use_ops
#define FINALIZE_ALLOC             alloc_use
#define FINALIZE_FREE              free_uses
#define FINALIZE_TYPE              struct use_optype_d
#define FINALIZE_ELEM(PTR)         ((PTR)->use_ptr.use)
#define FINALIZE_OPS               USE_OPS
#define FINALIZE_USE_PTR(PTR)      USE_OP_PTR (PTR)
#define FINALIZE_CORRECT_USE       correct_use_link
#define FINALIZE_BASE(VAR)         VAR
#define FINALIZE_BASE_TYPE         tree *
#define FINALIZE_BASE_ZERO         NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)                     \
                           (PTR)->use_ptr.use = (VAL);          \
                           link_imm_use_stmt (&((PTR)->use_ptr),\
                                              (VAL), (STMT))
#include "tree-ssa-opfinalize.h"

/* Return a new use operand vector for STMT, comparing to OLD_OPS_P.  */

static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to get_stmt_operands does not pass a pointer to a
       local variable).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
  }
#endif
  finalize_ssa_use_ops (stmt);
  VEC_truncate (tree, build_uses, 0);
}

/* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P.  */
#define FINALIZE_OPBUILD build_v_may_defs
#define FINALIZE_OPBUILD_ELEM(I)   VEC_index (tree, build_v_may_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)   get_name_decl (VEC_index (tree,      \
                                   build_v_may_defs, (I)))
#define FINALIZE_FUNC              finalize_ssa_v_may_def_ops
#define FINALIZE_ALLOC             alloc_maydef
#define FINALIZE_FREE              free_maydefs
#define FINALIZE_TYPE              struct maydef_optype_d
#define FINALIZE_ELEM(PTR)         MAYDEF_RESULT (PTR)
#define FINALIZE_OPS               MAYDEF_OPS
#define FINALIZE_USE_PTR(PTR)      MAYDEF_OP_PTR (PTR)
#define FINALIZE_CORRECT_USE       set_virtual_use_link
#define FINALIZE_BASE_ZERO         0
#define FINALIZE_BASE(VAR)         get_name_decl (VAR)
#define FINALIZE_BASE_TYPE         unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)                     \
                           (PTR)->def_var = (VAL);              \
                           (PTR)->use_var = (VAL);              \
                           (PTR)->use_ptr.use = &((PTR)->use_var); \
                           link_imm_use_stmt (&((PTR)->use_ptr),\
                                              (VAL), (STMT))
#include "tree-ssa-opfinalize.h"

static void
finalize_ssa_v_may_defs (tree stmt)
{
  finalize_ssa_v_may_def_ops (stmt);
}

/* Clear the in_list bits and empty the build array for v_may_defs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = VEC_length (tree, build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
        {
          var_ann_t ann = var_ann (t);
          ann->in_v_may_def_list = 0;
        }
    }
  VEC_truncate (tree, build_v_may_defs, 0);
}

#define FINALIZE_OPBUILD build_vuses
#define FINALIZE_OPBUILD_ELEM(I)   VEC_index (tree, build_vuses, (I))
#define FINALIZE_OPBUILD_BASE(I)   get_name_decl (VEC_index (tree,      \
                                   build_vuses, (I)))
#define FINALIZE_FUNC              finalize_ssa_vuse_ops
#define FINALIZE_ALLOC             alloc_vuse
#define FINALIZE_FREE              free_vuses
#define FINALIZE_TYPE              struct vuse_optype_d
#define FINALIZE_ELEM(PTR)         VUSE_OP (PTR)
#define FINALIZE_OPS               VUSE_OPS
#define FINALIZE_USE_PTR(PTR)      VUSE_OP_PTR (PTR)
#define FINALIZE_CORRECT_USE       set_virtual_use_link
#define FINALIZE_BASE_ZERO         0
#define FINALIZE_BASE(VAR)         get_name_decl (VAR)
#define FINALIZE_BASE_TYPE         unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)                     \
                           (PTR)->use_var = (VAL);              \
                           (PTR)->use_ptr.use = &((PTR)->use_var); \
                           link_imm_use_stmt (&((PTR)->use_ptr),\
                                              (VAL), (STMT))
#include "tree-ssa-opfinalize.h"

/* Return a new vuse operand vector, comparing to OLD_OPS_P.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_v_may_defs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

              # VUSE <a_2>
              # a_3 = V_MAY_DEF <a_2>
              a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  num = VEC_length (tree, build_vuses);
  num_v_may_defs = VEC_length (tree, build_v_may_defs);

  if (num > 0 && num_v_may_defs > 0)
    {
      for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
        {
          tree vuse;
          vuse = VEC_index (tree, build_vuses, vuse_index);
          if (TREE_CODE (vuse) != SSA_NAME)
            {
              var_ann_t ann = var_ann (vuse);
              ann->in_vuse_list = 0;
              if (ann->in_v_may_def_list)
                {
                  VEC_ordered_remove (tree, build_vuses, vuse_index);
                  continue;
                }
            }
          vuse_index++;
        }
    }
  else
    /* Clear out the in_list bits.  */
    for (vuse_index = 0;
         vuse_index < VEC_length (tree, build_vuses);
         vuse_index++)
      {
        tree t = VEC_index (tree, build_vuses, vuse_index);
        if (TREE_CODE (t) != SSA_NAME)
          {
            var_ann_t ann = var_ann (t);
            ann->in_vuse_list = 0;
          }
      }

  finalize_ssa_vuse_ops (stmt);
  /* The v_may_def build vector wasn't cleaned up because we needed it.  */
  cleanup_v_may_defs ();

  /* Free the vuses build vector.  */
  VEC_truncate (tree, build_vuses, 0);
}

/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P.  */

#define FINALIZE_OPBUILD build_v_must_defs
#define FINALIZE_OPBUILD_ELEM(I)   VEC_index (tree, build_v_must_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)   get_name_decl (VEC_index (tree,      \
                                   build_v_must_defs, (I)))
#define FINALIZE_FUNC              finalize_ssa_v_must_def_ops
#define FINALIZE_ALLOC             alloc_mustdef
#define FINALIZE_FREE              free_mustdefs
#define FINALIZE_TYPE              struct mustdef_optype_d
#define FINALIZE_ELEM(PTR)         MUSTDEF_RESULT (PTR)
#define FINALIZE_OPS               MUSTDEF_OPS
#define FINALIZE_USE_PTR(PTR)      MUSTDEF_KILL_PTR (PTR)
#define FINALIZE_CORRECT_USE       set_virtual_use_link
#define FINALIZE_BASE_ZERO         0
#define FINALIZE_BASE(VAR)         get_name_decl (VAR)
#define FINALIZE_BASE_TYPE         unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)                     \
                           (PTR)->def_var = (VAL);              \
                           (PTR)->kill_var = (VAL);             \
                           (PTR)->use_ptr.use = &((PTR)->kill_var); \
                           link_imm_use_stmt (&((PTR)->use_ptr),\
                                              (VAL), (STMT))
#include "tree-ssa-opfinalize.h"

static void
finalize_ssa_v_must_defs (tree stmt)
{
  /* In the presence of subvars, there may be more than one V_MUST_DEF per
     statement (one for each subvar).  It is a bit expensive to verify that
     all must-defs in a statement belong to subvars if there is more than one
     MUST-def, so we don't do it.  Suffice to say, if you reach here without
     having subvars, and have num > 1, you have hit a bug.  */

  finalize_ssa_v_must_def_ops (stmt);
  VEC_truncate (tree, build_v_must_defs, 0);
}

/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);
}

/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
  gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
}

/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree)def_p);
}

/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree)use_p);
}

/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      /* Don't allow duplicate entries.  */
      if (ann->in_v_may_def_list)
        return;
      ann->in_v_may_def_list = 1;
    }

  VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
}

/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
        return;
      ann->in_vuse_list = 1;
    }

  VEC_safe_push (tree, heap, build_vuses, (tree)var);
}

/* Add VAR to the list of virtual must definitions for INFO.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
    if (var == VEC_index (tree, build_v_must_defs, i))
      return;

  VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
}

/* Parse STMT looking for operands.  OLD_OPS is the original stmt operand
   cache for STMT, if it existed before.  When finished, the various build_*
   operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      /* First get operands from the RHS.  For the LHS, we use a V_MAY_DEF if
         either only part of LHS is modified or if the RHS might throw,
         otherwise, use V_MUST_DEF.

         ??? If it might throw, we should represent somehow that it is killed
         on the fallthrough path.  */
      {
        tree lhs = TREE_OPERAND (stmt, 0);
        int lhs_flags = opf_is_def;

        get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);

        /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
           or not the entire LHS is modified; that depends on what's
           inside the VIEW_CONVERT_EXPR.  */
        if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
          lhs = TREE_OPERAND (lhs, 0);

        if (TREE_CODE (lhs) != ARRAY_REF
            && TREE_CODE (lhs) != ARRAY_RANGE_REF
            && TREE_CODE (lhs) != BIT_FIELD_REF)
          lhs_flags |= opf_kill_def;

        get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
      }
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

      /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
         pointer (which may only happen for USE operands), we will fail in
         append_use.  This default will handle statements like empty
         statements, or CALL_EXPRs that may appear on the RHS of a statement
         or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }
}

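/* For example, given a statement 'a = b + c' in which all three variables
   are GIMPLE registers, the walk above leaves the address of the LHS
   operand in build_defs and the addresses of the 'b' and 'c' operands in
   build_uses, while the virtual build vectors remain empty.  */
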
/* Create an operands cache for STMT.  */

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands.  */
  if (ann)
    ann->has_volatile_ops = false;

  start_ssa_stmt_operands ();

  parse_ssa_operands (stmt);
  operand_build_sort_virtual (build_vuses);
  operand_build_sort_virtual (build_v_may_defs);
  operand_build_sort_virtual (build_v_must_defs);

  finalize_ssa_stmt_operands (stmt);
}

/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->maydef_ops = NULL;
  ops->mustdef_ops = NULL;
  ops->vuse_ops = NULL;
}

/* Get the operands of statement STMT.  Note that repeated calls to
   get_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to mark_stmt_modified().  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);
  /* If get_stmt_operands is called before SSA is initialized, don't
     do anything.  */
  if (!ssa_operands_active ())
    return;
  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  gcc_assert (ann->modified);

  timevar_push (TV_TREE_OPS);

  build_ssa_operands (stmt);

  /* Clear the modified bit for STMT.  Subsequent calls to
     get_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to mark_stmt_modified().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}

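/* A usage sketch: a caller that rewrites part of a statement in place
   (new_rhs below standing for whatever replacement operand it built)
   flags the statement and then rebuilds the cache:

     TREE_OPERAND (stmt, 1) = new_rhs;
     mark_stmt_modified (stmt);
     update_stmt_operands (stmt);

   Most passes reach this through the update_stmt () wrapper declared in
   the tree-ssa headers.  */
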
/* Copies virtual operands from SRC to DST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  tree t;
  ssa_op_iter iter, old_iter;
  use_operand_p use_p, u2;
  def_operand_p def_p, d2;

  build_ssa_operands (dest);

  /* Copy all the virtual fields.  */
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
    append_vuse (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
    append_v_may_def (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
    append_v_must_def (t);

  if (VEC_length (tree, build_vuses) == 0
      && VEC_length (tree, build_v_may_defs) == 0
      && VEC_length (tree, build_v_must_defs) == 0)
    return;

  /* Now commit the virtual operands to this stmt.  */
  finalize_ssa_v_must_defs (dest);
  finalize_ssa_v_may_defs (dest);
  finalize_ssa_vuses (dest);

  /* Finally, set the fields to the same values as the originals.  */
  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, t);
      t = op_iter_next_tree (&old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_maydef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));
}

/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store, this can
   be used to eliminate redundant loads.  OLD_OPS are the operands from the
   store stmt, and NEW_STMT is the new load which represents a load of the
   values stored.  */

void
create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
{
  stmt_ann_t ann;
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned x;

  ann = get_stmt_ann (new_stmt);

  /* Process the stmt looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (x = 0; x < VEC_length (tree, build_vuses); x++)
    {
      tree t = VEC_index (tree, build_vuses, x);
      if (TREE_CODE (t) != SSA_NAME)
        {
          var_ann_t ann = var_ann (t);
          ann->in_vuse_list = 0;
        }
    }

  for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
        {
          var_ann_t ann = var_ann (t);
          ann->in_v_may_def_list = 0;
        }
    }

  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_v_may_defs, 0);
  VEC_truncate (tree, build_v_must_defs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
                             (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
    append_vuse (op);

  /* Now build the operands for this new stmt.  */
  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}

/* Swap operands EXP0 and EXP1 in statement STMT.  */

void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative positions
     of these two operands in their respective immediate use lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp0)
          {
            use0 = ptr;
            break;
          }
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp1)
          {
            use1 = ptr;
            break;
          }
      /* If both uses don't have operand entries, there isn't much we can do
         at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
        {
          tree *tmp = USE_OP_PTR (use1)->use;
          USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
          USE_OP_PTR (use0)->use = tmp;
        }
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}

/* Recursively scan the expression pointed to by EXPR_P in statement referred
   to by INFO.  FLAGS is one of the OPF_* constants modifying how to interpret
   the operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* We could have the address of a component, array member,
         etc which has interesting variable references.  */
      /* Taking the address of a variable does not represent a
         reference to it, but the fact that the stmt takes its address will be
         of interest to some passes (e.g. alias resolution).  */
      add_stmt_operand (expr_p, s_ann, 0);

      /* If the address is invariant, there may be no interesting variable
         references inside.  */
      if (is_gimple_min_invariant (expr))
        return;

      /* There should be no VUSEs created, since the referenced objects are
         not really accessed.  The only operands that we should find here
         are ARRAY_REF indices which will always be real operands (GIMPLE
         does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case STRUCT_FIELD_TAG:
    case TYPE_MEMORY_TAG:
    case NAME_MEMORY_TAG:
      add_stmt_operand (expr_p, s_ann, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      {
        subvar_t svars;

        /* Add the subvars for a variable if it has subvars, to DEFS or USES.
           Otherwise, add the variable itself.
           Whether it goes to USES or DEFS depends on the operand flags.  */
        if (var_can_have_subvars (expr)
            && (svars = get_subvars_for_var (expr)))
          {
            subvar_t sv;
            for (sv = svars; sv; sv = sv->next)
              add_stmt_operand (&sv->var, s_ann, flags);
          }
        else
          add_stmt_operand (expr_p, s_ann, flags);
        return;
      }

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* Fall through.  */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
         representing the array.  The virtual variable for an ARRAY_REF
         is the VAR_DECL for the array.  */

      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
         according to the value of IS_DEF.  Recurse if the LHS of the
         ARRAY_REF node is not a regular variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
        add_stmt_operand (expr_p, s_ann, flags);
      else
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
      return;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree ref;
        HOST_WIDE_INT offset, size, maxsize;

        /* This component ref becomes an access to all of the subvariables
           it can touch, if we can determine that, but *NOT* the real one.
           If we can't determine which fields we could touch, the recursion
           will eventually get to a variable and add *all* of its subvars, or
           whatever is the minimum correct subset.  */

        ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
        if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
          {
            subvar_t svars = get_subvars_for_var (ref);
            subvar_t sv;
            for (sv = svars; sv; sv = sv->next)
              {
                bool exact;
                if (overlap_subvar (offset, maxsize, sv, &exact))
                  {
                    int subvar_flags = flags;
                    if (!exact || size != maxsize)
                      subvar_flags &= ~opf_kill_def;
                    add_stmt_operand (&sv->var, s_ann, subvar_flags);
                  }
              }
            flags |= opf_no_vops;
          }

        /* Even if we found subvars above we need to ensure to see
           immediate uses for d in s.a[d].  In case of s.a having
           a subvar we'd miss it otherwise.  */
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
                           flags & ~opf_kill_def);

        if (code == COMPONENT_REF)
          {
            if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
              s_ann->has_volatile_ops = true;
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
          }
        return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
         and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case MODIFY_EXPR:
      {
        int subflags;
        tree op;

        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

        op = TREE_OPERAND (expr, 0);
        if (TREE_CODE (op) == WITH_SIZE_EXPR)
          op = TREE_OPERAND (op, 0);
        if (TREE_CODE (op) == ARRAY_REF
            || TREE_CODE (op) == ARRAY_RANGE_REF
            || TREE_CODE (op) == REALPART_EXPR
            || TREE_CODE (op) == IMAGPART_EXPR)
          subflags = opf_is_def;
        else
          subflags = opf_is_def | opf_kill_def;

        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
        return;
      }

    case CONSTRUCTOR:
      {
        /* General aggregate CONSTRUCTORs have been decomposed, but they
           are still in use as the COMPLEX_EXPR equivalent for vectors.  */
        constructor_elt *ce;
        unsigned HOST_WIDE_INT idx;

        for (idx = 0;
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
             idx++)
          get_expr_operands (stmt, &ce->value, opf_none);

        return;
      }

    case TRUTH_NOT_EXPR:
    case BIT_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      return;

    case REALIGN_LOAD_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
      return;

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
    case CONST_DECL:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
        goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
        goto do_binary;
      if (class == tcc_constant || class == tcc_type)
        return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  internal_error ("internal error");
#endif
  gcc_unreachable ();
}

/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
        = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
                               &allows_mem, &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t) && s_ann)
            add_to_addressable_set (t, &s_ann->addresses_taken);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
        = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t) && s_ann)
            add_to_addressable_set (t, &s_ann->addresses_taken);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
        unsigned i;
        bitmap_iterator bi;

        /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
           decided to group them).  */
        if (global_var)
          add_stmt_operand (&global_var, s_ann, opf_is_def);
        else
          EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
            {
              tree var = referenced_var (i);
              add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
            }

        /* Now clobber all addressables.  */
        EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
          {
            tree var = referenced_var (i);

            /* Subvars are explicitly represented in this list, so
               we don't need the original to be added to the clobber
               ops, but the original *will* be in this list because
               we keep the addressability of the original
               variable up-to-date so we don't screw up the rest of
               things.  */
            if (var_can_have_subvars (var)
                && get_subvars_for_var (var) != NULL)
              continue;

            add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
          }

        break;
      }
}

/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
          && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
          && pi->name_mem_tag)
        {
          /* PTR has its own memory tag.  Use it.  */
          add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
        }
      else
        {
          /* If PTR is not an SSA_NAME or it doesn't have a name
             tag, use its type memory tag.  */
          var_ann_t v_ann;

          /* If we are emitting debugging dumps, display a warning if
             PTR is an SSA_NAME with no flow-sensitive alias
             information.  That means that we may need to compute
             aliasing again.  */
          if (dump_file
              && TREE_CODE (ptr) == SSA_NAME
              && pi == NULL)
            {
              fprintf (dump_file,
                       "NOTE: no flow-sensitive alias info for ");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, " in ");
              print_generic_stmt (dump_file, stmt, dump_flags);
            }

          if (TREE_CODE (ptr) == SSA_NAME)
            ptr = SSA_NAME_VAR (ptr);
          v_ann = var_ann (ptr);
          if (v_ann->type_mem_tag)
            add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
        }
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      if (s_ann)
        s_ann->has_volatile_ops = true;
      return;
    }

  /* Everything else *should* have been folded elsewhere, but users
     are smarter than we in finding ways to write invalid code.  We
     cannot just assert here.  If we were absolutely certain that we
     do handle all valid cases, then we could just do nothing here.
     That seems optimistic, so attempt to do something logical...  */
  else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
           && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
           && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
    {
      /* Make sure we know the object is addressable.  */
      pptr = &TREE_OPERAND (ptr, 0);
      add_stmt_operand (pptr, s_ann, 0);

      /* Mark the object itself with a VUSE.  */
      pptr = &TREE_OPERAND (*pptr, 0);
      get_expr_operands (stmt, pptr, flags);
      return;
    }

  /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
  else
    gcc_unreachable ();

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none);
}

/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag = TMR_TAG (expr), ref;
  HOST_WIDE_INT offset, size, maxsize;
  subvar_t svars, sv;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);

  /* MEM_REFs should never be killing.  */
  flags &= ~opf_kill_def;

  if (TMR_SYMBOL (expr))
    {
      stmt_ann_t ann = stmt_ann (stmt);
      add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
    }

  if (tag == NULL_TREE)
    {
      /* Something weird, so ensure that we will be careful.  */
      stmt_ann (stmt)->has_volatile_ops = true;
      return;
    }

  if (DECL_P (tag))
    {
      get_expr_operands (stmt, &tag, flags);
      return;
    }

  ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
  gcc_assert (ref != NULL_TREE);
  svars = get_subvars_for_var (ref);
  for (sv = svars; sv; sv = sv->next)
    {
      bool exact;
      if (overlap_subvar (offset, maxsize, sv, &exact))
        {
          int subvar_flags = flags;
          if (!exact || size != maxsize)
            subvar_flags &= ~opf_kill_def;
          add_stmt_operand (&sv->var, s_ann, subvar_flags);
        }
    }
}

/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or V_USE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
  if (aliases_computed_p
      && !bitmap_empty_p (call_clobbered_vars)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
         A 'noreturn' function might, but since we don't return anyway
         there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
          && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
        add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
        add_call_read_ops (stmt);
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}

/* Add *VAR_P to the appropriate operand array for INFO.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  bool is_real_op;
  tree var, sym;
  var_ann_t v_ann;

  var = *var_p;
  STRIP_NOPS (var);

  /* If the operand is an ADDR_EXPR, add its operand to the list of
     variables that have had their address taken in this statement.  */
  if (TREE_CODE (var) == ADDR_EXPR && s_ann)
    {
      add_to_addressable_set (TREE_OPERAND (var, 0), &s_ann->addresses_taken);
      return;
    }

  /* If the original variable is not a scalar, it will be added to the list
     of virtual operands.  In that case, use its base symbol as the virtual
     variable representing it.  */
  is_real_op = is_gimple_reg (var);
  if (!is_real_op && !DECL_P (var))
    var = get_virtual_var (var);

  /* If VAR is not a variable that we care to optimize, do nothing.  */
  if (var == NULL_TREE || !SSA_VAR_P (var))
    return;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a V_MAY_DEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     check that this only happens on non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     modify_expr, then we can't suppress the V_DEF, lest we run into
     validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_non_specific) && unmodifiable_var_p (var))
    {
      gcc_assert (!is_real_op);
      flags &= ~(opf_is_def | opf_kill_def);
    }

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
        append_def (var_p);
      else
        append_use (var_p);
    }
  else
    {
      VEC(tree,gc) *aliases;

      /* The variable is not a GIMPLE register.  Add it (or its aliases) to
         virtual operands, unless the caller has specifically requested
         not to add virtual operands (used when adding operands inside an
         ADDR_EXPR expression).  */
      if (flags & opf_no_vops)
        return;

      aliases = v_ann->may_aliases;

      if (aliases == NULL)
        {
          /* The variable is not aliased or it is an alias tag.  */
          if (flags & opf_is_def)
            {
              if (flags & opf_kill_def)
                {
                  /* Only regular variables or struct fields may get a
                     V_MUST_DEF operand.  */
                  gcc_assert (!MTAG_P (var)
                              || TREE_CODE (var) == STRUCT_FIELD_TAG);
                  /* V_MUST_DEF for non-aliased, non-GIMPLE register
                     variable definitions.  */
                  append_v_must_def (var);
                }
              else
                {
                  /* Add a V_MAY_DEF for call-clobbered variables and
                     memory tags.  */
                  append_v_may_def (var);
                }
            }
          else
            append_vuse (var);
        }
      else
        {
          unsigned i;
          tree al;

          /* The variable is aliased.  Add its aliases to the virtual
             operands.  */
          gcc_assert (VEC_length (tree, aliases) != 0);

          if (flags & opf_is_def)
            {
              /* If the variable is also an alias tag, add a virtual
                 operand for it, otherwise we will miss representing
                 references to the members of the variable's alias set.
                 This fixes the bug in gcc.c-torture/execute/20020503-1.c.  */
              if (v_ann->is_alias_tag)
                append_v_may_def (var);

              for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
                append_v_may_def (al);
            }
          else
            {
              /* Similarly, append a virtual use for VAR itself, when
                 it is an alias tag.  */
              if (v_ann->is_alias_tag)
                append_vuse (var);

              for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
                append_vuse (al);
            }
        }
    }
}

/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
   a single variable whose address has been taken or any other valid
   GIMPLE memory reference (structure reference, array, etc).  If the
   base address of REF is a decl that has sub-variables, also add all
   of its sub-variables.  */

void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
  tree var;
  subvar_t svars;

  gcc_assert (addresses_taken);

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*addresses_taken == NULL)
        *addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
          && (svars = get_subvars_for_var (var)))
        {
          subvar_t sv;
          for (sv = svars; sv; sv = sv->next)
            {
              bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
              TREE_ADDRESSABLE (sv->var) = 1;
            }
        }
      else
        {
          bitmap_set_bit (*addresses_taken, DECL_UID (var));
          TREE_ADDRESSABLE (var) = 1;
        }
    }
}

/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;
  bitmap not_read_b, not_written_b;

  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_is_def);
      return;
    }

  /* FIXME - if we have better information from the static vars
     analysis, we need to make the cache call site specific.  This way
     we can have the performance benefits even if we are doing good
     optimization.  */

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;

  /* If cache is valid, copy the elements into the build vectors.  */
  if (ssa_call_clobbered_cache_valid
      && (!not_read_b || bitmap_empty_p (not_read_b))
      && (!not_written_b || bitmap_empty_p (not_written_b)))
    {
      for (u = 0; u < VEC_length (tree, clobbered_vuses); u++)
        {
          t = VEC_index (tree, clobbered_vuses, u);
          gcc_assert (TREE_CODE (t) != SSA_NAME);
          var_ann (t)->in_vuse_list = 1;
          VEC_safe_push (tree, heap, build_vuses, (tree)t);
        }
      for (u = 0; u < VEC_length (tree, clobbered_v_may_defs); u++)
        {
          t = VEC_index (tree, clobbered_v_may_defs, u);
          gcc_assert (TREE_CODE (t) != SSA_NAME);
          var_ann (t)->in_v_may_def_list = 1;
          VEC_safe_push (tree, heap, build_v_may_defs, (tree)t);
        }
      return;
    }

  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a V_MAY_DEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      if (unmodifiable_var_p (var))
        add_stmt_operand (&var, &empty_ann, opf_none);
      else
        {
          bool not_read
            = not_read_b ? bitmap_bit_p (not_read_b, u) : false;
          bool not_written
            = not_written_b ? bitmap_bit_p (not_written_b, u) : false;

          if (not_written)
            {
              if (!not_read)
                add_stmt_operand (&var, &empty_ann, opf_none);
            }
          else
            add_stmt_operand (&var, &empty_ann, opf_is_def);
        }
    }

  if ((!not_read_b || bitmap_empty_p (not_read_b))
      && (!not_written_b || bitmap_empty_p (not_written_b)))
    {
      /* Prepare empty cache vectors.  */
      VEC_truncate (tree, clobbered_vuses, 0);
      VEC_truncate (tree, clobbered_v_may_defs, 0);

      /* Now fill the clobbered cache with the values that have been found.  */
      for (u = 0; u < VEC_length (tree, build_vuses); u++)
        VEC_safe_push (tree, heap, clobbered_vuses,
                       VEC_index (tree, build_vuses, u));

      gcc_assert (VEC_length (tree, build_vuses)
                  == VEC_length (tree, clobbered_vuses));

      for (u = 0; u < VEC_length (tree, build_v_may_defs); u++)
        VEC_safe_push (tree, heap, clobbered_v_may_defs,
                       VEC_index (tree, build_v_may_defs, u));

      gcc_assert (VEC_length (tree, build_v_may_defs)
                  == VEC_length (tree, clobbered_v_may_defs));

      ssa_call_clobbered_cache_valid = true;
    }
}

/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt)
{
  unsigned u;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_none);
      return;
    }

  /* If cache is valid, copy the elements into the build vector.  */
  if (ssa_ro_call_cache_valid)
    {
      for (u = 0; u < VEC_length (tree, ro_call_vuses); u++)
        {
          t = VEC_index (tree, ro_call_vuses, u);
          gcc_assert (TREE_CODE (t) != SSA_NAME);
          var_ann (t)->in_vuse_list = 1;
          VEC_safe_push (tree, heap, build_vuses, (tree)t);
        }
      return;
    }

  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
    }

  /* Prepare empty cache vectors.  */
  VEC_truncate (tree, ro_call_vuses, 0);

  /* Now fill the clobbered cache with the values that have been found.  */
  for (u = 0; u < VEC_length (tree, build_vuses); u++)
    VEC_safe_push (tree, heap, ro_call_vuses,
                   VEC_index (tree, build_vuses, u));

  gcc_assert (VEC_length (tree, build_vuses)
              == VEC_length (tree, ro_call_vuses));

  ssa_ro_call_cache_valid = true;
}

/* Scan the immediate_use list for VAR, making sure it is linked properly.
   Return true if there is a problem.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;
      /* Avoid infinite loops.  50,000,000 uses probably indicates a problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}

1998 dump_immediate_uses_for (FILE *file
, tree var
)
2000 imm_use_iterator iter
;
2001 use_operand_p use_p
;
2003 gcc_assert (var
&& TREE_CODE (var
) == SSA_NAME
);
2005 print_generic_expr (file
, var
, TDF_SLIM
);
2006 fprintf (file
, " : -->");
2007 if (has_zero_uses (var
))
2008 fprintf (file
, " no uses.\n");
2010 if (has_single_use (var
))
2011 fprintf (file
, " single use.\n");
2013 fprintf (file
, "%d uses.\n", num_imm_uses (var
));
2015 FOR_EACH_IMM_USE_FAST (use_p
, iter
, var
)
2017 if (!is_gimple_reg (USE_FROM_PTR (use_p
)))
2018 print_generic_stmt (file
, USE_STMT (use_p
), TDF_VOPS
);
2020 print_generic_stmt (file
, USE_STMT (use_p
), TDF_SLIM
);
2022 fprintf(file
, "\n");
/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}

/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}

/* Dump def-use edges on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

#include "gt-tree-ssa-operands.h"