1 /* SSA operands management for trees.
2 Copyright (C) 2003 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
23 #include "coretypes.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
35 /* Flags to describe operand properties in get_stmt_operands and helpers. */
37 /* By default, operands are loaded. */
40 /* Operand is the target of an assignment expression or a
41 call-clobbered variable */
42 #define opf_is_def (1 << 0)
44 /* Operand is the target of an assignment expression. */
45 #define opf_kill_def (1 << 2)
47 /* No virtual operands should be created in the expression. This is used
48 when traversing ADDR_EXPR nodes which have different semantics than
49 other expressions. Inside an ADDR_EXPR node, the only operands that we
50 need to consider are indices into arrays. For instance, &a.b[i] should
51 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
52 VUSE for 'b'.  */
53 #define opf_no_vops (1 << 1)
55 /* Array for building all the def operands. */
56 static GTY (()) varray_type build_defs
;
58 /* Array for building all the use operands. */
59 static GTY (()) varray_type build_uses
;
61 /* Array for building all the v_may_def operands. */
62 static GTY (()) varray_type build_v_may_defs
;
64 /* Array for building all the vuse operands. */
65 static GTY (()) varray_type build_vuses
;
67 /* Array for building all the v_must_def operands. */
68 static GTY (()) varray_type build_v_must_defs
;
#ifdef ENABLE_CHECKING
/* The statement currently being scanned for operands.  Used to verify
   that every append_* call happens between start_ssa_stmt_operands and
   finalize_ssa_stmt_operands for the same statement.  */
tree check_build_stmt;
#endif
74 typedef struct voperands_d
76 v_may_def_optype v_may_def_ops
;
78 v_must_def_optype v_must_def_ops
;
81 static void note_addressable (tree
, stmt_ann_t
);
82 static void get_expr_operands (tree
, tree
*, int, voperands_t
);
83 static void get_asm_expr_operands (tree
, voperands_t
);
84 static void get_indirect_ref_operands (tree
, tree
, int, voperands_t
);
85 static void get_call_expr_operands (tree
, tree
, voperands_t
);
86 static inline void append_def (tree
*, tree
);
87 static inline void append_use (tree
*, tree
);
88 static void append_v_may_def (tree
, tree
, voperands_t
);
89 static void append_v_must_def (tree
, tree
, voperands_t
);
90 static void add_call_clobber_ops (tree
, voperands_t
);
91 static void add_call_read_ops (tree
, voperands_t
);
92 static void add_stmt_operand (tree
*, tree
, int, voperands_t
);
94 /* Return a vector of contiguous memory of a specified size. */
96 static inline def_optype
97 allocate_def_optype (unsigned num
)
101 size
= sizeof (struct def_optype_d
) + sizeof (tree
*) * (num
- 1);
102 def_ops
= ggc_alloc (size
);
103 def_ops
->num_defs
= num
;
107 static inline use_optype
108 allocate_use_optype (unsigned num
)
112 size
= sizeof (struct use_optype_d
) + sizeof (tree
*) * (num
- 1);
113 use_ops
= ggc_alloc (size
);
114 use_ops
->num_uses
= num
;
118 static inline v_may_def_optype
119 allocate_v_may_def_optype (unsigned num
)
121 v_may_def_optype v_may_def_ops
;
123 size
= sizeof (struct v_may_def_optype_d
) + sizeof (tree
) * ((num
* 2) - 1);
124 v_may_def_ops
= ggc_alloc (size
);
125 v_may_def_ops
->num_v_may_defs
= num
;
126 return v_may_def_ops
;
129 static inline vuse_optype
130 allocate_vuse_optype (unsigned num
)
132 vuse_optype vuse_ops
;
134 size
= sizeof (struct vuse_optype_d
) + sizeof (tree
) * (num
- 1);
135 vuse_ops
= ggc_alloc (size
);
136 vuse_ops
->num_vuses
= num
;
140 static inline v_must_def_optype
141 allocate_v_must_def_optype (unsigned num
)
143 v_must_def_optype v_must_def_ops
;
145 size
= sizeof (struct v_must_def_optype_d
) + sizeof (tree
*) * (num
- 1);
146 v_must_def_ops
= ggc_alloc (size
);
147 v_must_def_ops
->num_v_must_defs
= num
;
148 return v_must_def_ops
;
152 free_uses (use_optype
*uses
, bool dealloc
)
163 free_defs (def_optype
*defs
, bool dealloc
)
174 free_vuses (vuse_optype
*vuses
, bool dealloc
)
185 free_v_may_defs (v_may_def_optype
*v_may_defs
, bool dealloc
)
190 ggc_free (*v_may_defs
);
196 free_v_must_defs (v_must_def_optype
*v_must_defs
, bool dealloc
)
201 ggc_free (*v_must_defs
);
207 remove_vuses (tree stmt
)
211 ann
= stmt_ann (stmt
);
213 free_vuses (&(ann
->vuse_ops
), true);
217 remove_v_may_defs (tree stmt
)
221 ann
= stmt_ann (stmt
);
223 free_v_may_defs (&(ann
->v_may_def_ops
), true);
227 remove_v_must_defs (tree stmt
)
231 ann
= stmt_ann (stmt
);
233 free_v_must_defs (&(ann
->v_must_def_ops
), true);
237 init_ssa_operands (void)
239 VARRAY_TREE_PTR_INIT (build_defs
, 5, "build defs");
240 VARRAY_TREE_PTR_INIT (build_uses
, 10, "build uses");
241 VARRAY_TREE_INIT (build_v_may_defs
, 10, "build v_may_defs");
242 VARRAY_TREE_INIT (build_vuses
, 10, "build vuses");
243 VARRAY_TREE_INIT (build_v_must_defs
, 10, "build v_must_defs");
/* Tear down the operand-building machinery.  NOTE(review): the body was
   lost in extraction; reconstructed as the empty function of this GCC
   vintage (the build arrays are GTY-rooted and collected automatically)
   -- confirm against upstream.  */

void
fini_ssa_operands (void)
{
}
252 finalize_ssa_defs (tree stmt
)
258 num
= VARRAY_ACTIVE_SIZE (build_defs
);
262 #ifdef ENABLE_CHECKING
263 /* There should only be a single real definition per assignment. */
264 if (TREE_CODE (stmt
) == MODIFY_EXPR
&& num
> 1)
268 def_ops
= allocate_def_optype (num
);
269 for (x
= 0; x
< num
; x
++)
270 def_ops
->defs
[x
].def
= VARRAY_TREE_PTR (build_defs
, x
);
271 VARRAY_POP_ALL (build_defs
);
273 ann
= stmt_ann (stmt
);
274 ann
->def_ops
= def_ops
;
278 finalize_ssa_uses (tree stmt
)
284 num
= VARRAY_ACTIVE_SIZE (build_uses
);
288 #ifdef ENABLE_CHECKING
291 /* If the pointer to the operand is the statement itself, something is
292 wrong. It means that we are pointing to a local variable (the
293 initial call to get_stmt_operands does not pass a pointer to a
295 for (x
= 0; x
< num
; x
++)
296 if (*(VARRAY_TREE_PTR (build_uses
, x
)) == stmt
)
301 use_ops
= allocate_use_optype (num
);
302 for (x
= 0; x
< num
; x
++)
303 use_ops
->uses
[x
].use
= VARRAY_TREE_PTR (build_uses
, x
);
304 VARRAY_POP_ALL (build_uses
);
306 ann
= stmt_ann (stmt
);
307 ann
->use_ops
= use_ops
;
311 finalize_ssa_v_may_defs (tree stmt
)
314 v_may_def_optype v_may_def_ops
;
317 num
= VARRAY_ACTIVE_SIZE (build_v_may_defs
);
321 #ifdef ENABLE_CHECKING
322 /* V_MAY_DEFs must be entered in pairs of result/uses. */
327 v_may_def_ops
= allocate_v_may_def_optype (num
/ 2);
328 for (x
= 0; x
< num
; x
++)
329 v_may_def_ops
->v_may_defs
[x
] = VARRAY_TREE (build_v_may_defs
, x
);
330 VARRAY_CLEAR (build_v_may_defs
);
332 ann
= stmt_ann (stmt
);
333 ann
->v_may_def_ops
= v_may_def_ops
;
337 finalize_ssa_vuses (tree stmt
)
341 vuse_optype vuse_ops
;
342 v_may_def_optype v_may_defs
;
344 #ifdef ENABLE_CHECKING
345 if (VARRAY_ACTIVE_SIZE (build_v_may_defs
) > 0)
347 fprintf (stderr
, "Please finalize V_MAY_DEFs before finalize VUSES.\n");
352 num
= VARRAY_ACTIVE_SIZE (build_vuses
);
356 /* Remove superfluous VUSE operands. If the statement already has a
357 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
358 needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
359 suppose that variable 'a' is aliased:
362 # a_3 = V_MAY_DEF <a_2>
365 The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
368 ann
= stmt_ann (stmt
);
369 v_may_defs
= V_MAY_DEF_OPS (ann
);
370 if (NUM_V_MAY_DEFS (v_may_defs
) > 0)
373 for (i
= 0; i
< VARRAY_ACTIVE_SIZE (build_vuses
); i
++)
376 for (j
= 0; j
< NUM_V_MAY_DEFS (v_may_defs
); j
++)
378 tree vuse_var
, v_may_def_var
;
379 tree vuse
= VARRAY_TREE (build_vuses
, i
);
380 tree v_may_def
= V_MAY_DEF_OP (v_may_defs
, j
);
382 if (TREE_CODE (vuse
) == SSA_NAME
)
383 vuse_var
= SSA_NAME_VAR (vuse
);
387 if (TREE_CODE (v_may_def
) == SSA_NAME
)
388 v_may_def_var
= SSA_NAME_VAR (v_may_def
);
390 v_may_def_var
= v_may_def
;
392 if (vuse_var
== v_may_def_var
)
399 /* If we found a useless VUSE operand, remove it from the
400 operand array by replacing it with the last active element
401 in the operand array (unless the useless VUSE was the
402 last operand, in which case we simply remove it. */
405 if (i
!= VARRAY_ACTIVE_SIZE (build_vuses
) - 1)
407 VARRAY_TREE (build_vuses
, i
)
408 = VARRAY_TREE (build_vuses
,
409 VARRAY_ACTIVE_SIZE (build_vuses
) - 1);
411 VARRAY_POP (build_vuses
);
413 /* We want to rescan the element at this index, unless
414 this was the last element, in which case the loop
421 num
= VARRAY_ACTIVE_SIZE (build_vuses
);
422 /* We could have reduced the size to zero now, however. */
426 vuse_ops
= allocate_vuse_optype (num
);
427 for (x
= 0; x
< num
; x
++)
428 vuse_ops
->vuses
[x
] = VARRAY_TREE (build_vuses
, x
);
429 VARRAY_CLEAR (build_vuses
);
430 ann
->vuse_ops
= vuse_ops
;
434 finalize_ssa_v_must_defs (tree stmt
)
438 v_must_def_optype v_must_def_ops
;
440 num
= VARRAY_ACTIVE_SIZE (build_v_must_defs
);
444 #ifdef ENABLE_CHECKING
445 /* There should only be a single V_MUST_DEF per assignment. */
446 if (TREE_CODE (stmt
) == MODIFY_EXPR
&& num
> 1)
450 v_must_def_ops
= allocate_v_must_def_optype (num
);
451 for (x
= 0; x
< num
; x
++)
452 v_must_def_ops
->v_must_defs
[x
] = VARRAY_TREE (build_v_must_defs
, x
);
453 VARRAY_POP_ALL (build_v_must_defs
);
455 ann
= stmt_ann (stmt
);
456 ann
->v_must_def_ops
= v_must_def_ops
;
460 finalize_ssa_stmt_operands (tree stmt
)
462 #ifdef ENABLE_CHECKING
463 if (check_build_stmt
== NULL
)
467 finalize_ssa_defs (stmt
);
468 finalize_ssa_uses (stmt
);
469 finalize_ssa_v_must_defs (stmt
);
470 finalize_ssa_v_may_defs (stmt
);
471 finalize_ssa_vuses (stmt
);
473 #ifdef ENABLE_CHECKING
474 check_build_stmt
= NULL
;
480 verify_start_operands (tree stmt ATTRIBUTE_UNUSED
)
482 #ifdef ENABLE_CHECKING
483 if (VARRAY_ACTIVE_SIZE (build_defs
) > 0
484 || VARRAY_ACTIVE_SIZE (build_uses
) > 0
485 || VARRAY_ACTIVE_SIZE (build_vuses
) > 0
486 || VARRAY_ACTIVE_SIZE (build_v_may_defs
) > 0
487 || VARRAY_ACTIVE_SIZE (build_v_must_defs
) > 0)
489 if (check_build_stmt
!= NULL
)
491 check_build_stmt
= stmt
;
496 /* Add DEF_P to the list of pointers to operands defined by STMT. */
499 append_def (tree
*def_p
, tree stmt ATTRIBUTE_UNUSED
)
501 #ifdef ENABLE_CHECKING
502 if (check_build_stmt
!= stmt
)
505 VARRAY_PUSH_TREE_PTR (build_defs
, def_p
);
509 /* Add USE_P to the list of pointers to operands used by STMT. */
512 append_use (tree
*use_p
, tree stmt ATTRIBUTE_UNUSED
)
514 #ifdef ENABLE_CHECKING
515 if (check_build_stmt
!= stmt
)
518 VARRAY_PUSH_TREE_PTR (build_uses
, use_p
);
522 /* Add a new virtual def for variable VAR to statement STMT. If PREV_VOPS
523 is not NULL, the existing entries are preserved and no new entries are
524 added here. This is done to preserve the SSA numbering of virtual
528 append_v_may_def (tree var
, tree stmt
, voperands_t prev_vops
)
534 #ifdef ENABLE_CHECKING
535 if (check_build_stmt
!= stmt
)
539 ann
= stmt_ann (stmt
);
541 /* Don't allow duplicate entries. */
543 for (i
= 0; i
< VARRAY_ACTIVE_SIZE (build_v_may_defs
); i
+= 2)
545 tree result
= VARRAY_TREE (build_v_may_defs
, i
);
547 || (TREE_CODE (result
) == SSA_NAME
548 && var
== SSA_NAME_VAR (result
)))
552 /* If the statement already had virtual definitions, see if any of the
553 existing V_MAY_DEFs matches VAR. If so, re-use it, otherwise add a new
554 V_MAY_DEF for VAR. */
558 for (i
= 0; i
< NUM_V_MAY_DEFS (prev_vops
->v_may_def_ops
); i
++)
560 result
= V_MAY_DEF_RESULT (prev_vops
->v_may_def_ops
, i
);
562 || (TREE_CODE (result
) == SSA_NAME
563 && SSA_NAME_VAR (result
) == var
))
565 source
= V_MAY_DEF_OP (prev_vops
->v_may_def_ops
, i
);
570 /* If no previous V_MAY_DEF operand was found for VAR, create one now. */
571 if (source
== NULL_TREE
)
577 VARRAY_PUSH_TREE (build_v_may_defs
, result
);
578 VARRAY_PUSH_TREE (build_v_may_defs
, source
);
582 /* Add VAR to the list of virtual uses for STMT. If PREV_VOPS
583 is not NULL, the existing entries are preserved and no new entries are
584 added here. This is done to preserve the SSA numbering of virtual
588 append_vuse (tree var
, tree stmt
, voperands_t prev_vops
)
595 #ifdef ENABLE_CHECKING
596 if (check_build_stmt
!= stmt
)
600 ann
= stmt_ann (stmt
);
602 /* Don't allow duplicate entries. */
603 for (i
= 0; i
< VARRAY_ACTIVE_SIZE (build_vuses
); i
++)
605 tree vuse_var
= VARRAY_TREE (build_vuses
, i
);
607 || (TREE_CODE (vuse_var
) == SSA_NAME
608 && var
== SSA_NAME_VAR (vuse_var
)))
612 /* If the statement already had virtual uses, see if any of the
613 existing VUSEs matches VAR. If so, re-use it, otherwise add a new
618 for (i
= 0; i
< NUM_VUSES (prev_vops
->vuse_ops
); i
++)
620 vuse
= VUSE_OP (prev_vops
->vuse_ops
, i
);
622 || (TREE_CODE (vuse
) == SSA_NAME
623 && SSA_NAME_VAR (vuse
) == var
))
630 /* If VAR existed already in PREV_VOPS, re-use it. */
634 VARRAY_PUSH_TREE (build_vuses
, var
);
637 /* Add VAR to the list of virtual must definitions for STMT. If PREV_VOPS
638 is not NULL, the existing entries are preserved and no new entries are
639 added here. This is done to preserve the SSA numbering of virtual
643 append_v_must_def (tree var
, tree stmt
, voperands_t prev_vops
)
650 #ifdef ENABLE_CHECKING
651 if (check_build_stmt
!= stmt
)
655 ann
= stmt_ann (stmt
);
657 /* Don't allow duplicate entries. */
658 for (i
= 0; i
< VARRAY_ACTIVE_SIZE (build_v_must_defs
); i
++)
660 tree v_must_def_var
= VARRAY_TREE (build_v_must_defs
, i
);
661 if (var
== v_must_def_var
662 || (TREE_CODE (v_must_def_var
) == SSA_NAME
663 && var
== SSA_NAME_VAR (v_must_def_var
)))
667 /* If the statement already had virtual must defs, see if any of the
668 existing V_MUST_DEFs matches VAR. If so, re-use it, otherwise add a new
669 V_MUST_DEF for VAR. */
671 v_must_def
= NULL_TREE
;
673 for (i
= 0; i
< NUM_V_MUST_DEFS (prev_vops
->v_must_def_ops
); i
++)
675 v_must_def
= V_MUST_DEF_OP (prev_vops
->v_must_def_ops
, i
);
676 if (v_must_def
== var
677 || (TREE_CODE (v_must_def
) == SSA_NAME
678 && SSA_NAME_VAR (v_must_def
) == var
))
685 /* If VAR existed already in PREV_VOPS, re-use it. */
689 VARRAY_PUSH_TREE (build_v_must_defs
, var
);
693 /* External entry point which by-passes the previous vops mechanism. */
695 add_vuse (tree var
, tree stmt
)
697 append_vuse (var
, stmt
, NULL
);
701 /* Get the operands of statement STMT. Note that repeated calls to
702 get_stmt_operands for the same statement will do nothing until the
703 statement is marked modified by a call to modify_stmt(). */
706 get_stmt_operands (tree stmt
)
710 struct voperands_d prev_vops
;
712 #if defined ENABLE_CHECKING
713 /* The optimizers cannot handle statements that are nothing but a
714 _DECL. This indicates a bug in the gimplifier. */
715 if (SSA_VAR_P (stmt
))
719 /* Ignore error statements. */
720 if (TREE_CODE (stmt
) == ERROR_MARK
)
723 ann
= get_stmt_ann (stmt
);
725 /* If the statement has not been modified, the operands are still valid. */
729 timevar_push (TV_TREE_OPS
);
731 /* Initially assume that the statement has no volatile operands, nor
732 makes aliased loads or stores. */
733 ann
->has_volatile_ops
= false;
734 ann
->makes_aliased_stores
= false;
735 ann
->makes_aliased_loads
= false;
737 /* Remove any existing operands as they will be scanned again. */
738 free_defs (&(ann
->def_ops
), true);
739 free_uses (&(ann
->use_ops
), true);
741 /* Before removing existing virtual operands, save them in PREV_VOPS so
742 that we can re-use their SSA versions. */
743 prev_vops
.v_may_def_ops
= V_MAY_DEF_OPS (ann
);
744 prev_vops
.vuse_ops
= VUSE_OPS (ann
);
745 prev_vops
.v_must_def_ops
= V_MUST_DEF_OPS (ann
);
747 /* Don't free the previous values to memory since we're still using them. */
748 free_v_may_defs (&(ann
->v_may_def_ops
), false);
749 free_vuses (&(ann
->vuse_ops
), false);
750 free_v_must_defs (&(ann
->v_must_def_ops
), false);
752 start_ssa_stmt_operands (stmt
);
754 code
= TREE_CODE (stmt
);
758 get_expr_operands (stmt
, &TREE_OPERAND (stmt
, 1), opf_none
, &prev_vops
);
759 if (TREE_CODE (TREE_OPERAND (stmt
, 0)) == ARRAY_REF
760 || TREE_CODE (TREE_OPERAND (stmt
, 0)) == COMPONENT_REF
761 || TREE_CODE (TREE_OPERAND (stmt
, 0)) == REALPART_EXPR
762 || TREE_CODE (TREE_OPERAND (stmt
, 0)) == IMAGPART_EXPR
763 /* Use a V_MAY_DEF if the RHS might throw, as the LHS won't be
764 modified in that case. FIXME we should represent somehow
765 that it is killed on the fallthrough path. */
766 || tree_could_throw_p (TREE_OPERAND (stmt
, 1)))
767 get_expr_operands (stmt
, &TREE_OPERAND (stmt
, 0), opf_is_def
,
770 get_expr_operands (stmt
, &TREE_OPERAND (stmt
, 0),
771 opf_is_def
| opf_kill_def
, &prev_vops
);
775 get_expr_operands (stmt
, &COND_EXPR_COND (stmt
), opf_none
, &prev_vops
);
779 get_expr_operands (stmt
, &SWITCH_COND (stmt
), opf_none
, &prev_vops
);
783 get_asm_expr_operands (stmt
, &prev_vops
);
787 get_expr_operands (stmt
, &TREE_OPERAND (stmt
, 0), opf_none
, &prev_vops
);
791 get_expr_operands (stmt
, &GOTO_DESTINATION (stmt
), opf_none
, &prev_vops
);
795 get_expr_operands (stmt
, &LABEL_EXPR_LABEL (stmt
), opf_none
, &prev_vops
);
798 /* These nodes contain no variable references. */
800 case CASE_LABEL_EXPR
:
802 case TRY_FINALLY_EXPR
:
809 /* Notice that if get_expr_operands tries to use &STMT as the operand
810 pointer (which may only happen for USE operands), we will abort in
811 append_use. This default will handle statements like empty
812 statements, or CALL_EXPRs that may appear on the RHS of a statement
813 or as statements themselves. */
814 get_expr_operands (stmt
, &stmt
, opf_none
, &prev_vops
);
818 finalize_ssa_stmt_operands (stmt
);
820 /* Now free the previous virtual ops to memory. */
821 free_v_may_defs (&(prev_vops
.v_may_def_ops
), true);
822 free_vuses (&(prev_vops
.vuse_ops
), true);
823 free_v_must_defs (&(prev_vops
.v_must_def_ops
), true);
825 /* Clear the modified bit for STMT. Subsequent calls to
826 get_stmt_operands for this statement will do nothing until the
827 statement is marked modified by a call to modify_stmt(). */
830 timevar_pop (TV_TREE_OPS
);
834 /* Recursively scan the expression pointed by EXPR_P in statement STMT.
835 FLAGS is one of the OPF_* constants modifying how to interpret the
836 operands found. PREV_VOPS is as in append_v_may_def and append_vuse. */
839 get_expr_operands (tree stmt
, tree
*expr_p
, int flags
, voperands_t prev_vops
)
845 if (expr
== NULL
|| expr
== error_mark_node
)
848 code
= TREE_CODE (expr
);
849 class = TREE_CODE_CLASS (code
);
854 /* We could have the address of a component, array member,
855 etc which has interesting variable references. */
856 /* Taking the address of a variable does not represent a
857 reference to it, but the fact that STMT takes its address will be
858 of interest to some passes (e.g. alias resolution). */
859 add_stmt_operand (expr_p
, stmt
, 0, NULL
);
861 /* If the address is constant (invariant is not sufficient), there will
862 be no interesting variable references inside. */
863 if (TREE_CONSTANT (expr
))
866 /* There should be no VUSEs created, since the referenced objects are
867 not really accessed. The only operands that we should find here
868 are ARRAY_REF indices which will always be real operands (GIMPLE
869 does not allow non-registers as array indices). */
870 flags
|= opf_no_vops
;
872 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
, prev_vops
);
879 /* If we found a variable, add it to DEFS or USES depending
880 on the operand flags. */
881 add_stmt_operand (expr_p
, stmt
, flags
, prev_vops
);
885 get_indirect_ref_operands (stmt
, expr
, flags
, prev_vops
);
889 case ARRAY_RANGE_REF
:
890 /* Treat array references as references to the virtual variable
891 representing the array. The virtual variable for an ARRAY_REF
892 is the VAR_DECL for the array. */
894 /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
895 according to the value of IS_DEF. Recurse if the LHS of the
896 ARRAY_REF node is not a regular variable. */
897 if (SSA_VAR_P (TREE_OPERAND (expr
, 0)))
898 add_stmt_operand (expr_p
, stmt
, flags
, prev_vops
);
900 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
, prev_vops
);
902 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), opf_none
, prev_vops
);
903 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_none
, prev_vops
);
904 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 3), opf_none
, prev_vops
);
910 /* Similarly to arrays, references to compound variables (complex
911 types and structures/unions) are globbed.
913 FIXME: This means that
919 will not be constant propagated because the two partial
920 definitions to 'a' will kill each other. Note that SRA may be
921 able to fix this problem if 'a' can be scalarized. */
923 /* If the LHS of the compound reference is not a regular variable,
924 recurse to keep looking for more operands in the subexpression. */
925 if (SSA_VAR_P (TREE_OPERAND (expr
, 0)))
926 add_stmt_operand (expr_p
, stmt
, flags
, prev_vops
);
928 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
, prev_vops
);
930 if (code
== COMPONENT_REF
)
931 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_none
, prev_vops
);
935 /* WITH_SIZE_EXPR is a pass-through reference to it's first argument,
936 and an rvalue reference to its second argument. */
937 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), opf_none
, prev_vops
);
938 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
, prev_vops
);
942 get_call_expr_operands (stmt
, expr
, prev_vops
);
950 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), opf_none
, prev_vops
);
952 op
= TREE_OPERAND (expr
, 0);
953 if (TREE_CODE (op
) == WITH_SIZE_EXPR
)
954 op
= TREE_OPERAND (expr
, 0);
955 if (TREE_CODE (op
) == ARRAY_REF
956 || TREE_CODE (op
) == COMPONENT_REF
957 || TREE_CODE (op
) == REALPART_EXPR
958 || TREE_CODE (op
) == IMAGPART_EXPR
)
959 subflags
= opf_is_def
;
961 subflags
= opf_is_def
| opf_kill_def
;
963 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), subflags
, prev_vops
);
969 /* General aggregate CONSTRUCTORs have been decomposed, but they
970 are still in use as the COMPLEX_EXPR equivalent for vectors. */
973 for (t
= TREE_OPERAND (expr
, 0); t
; t
= TREE_CHAIN (t
))
974 get_expr_operands (stmt
, &TREE_VALUE (t
), opf_none
, prev_vops
);
982 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
, prev_vops
);
992 tree op0
= TREE_OPERAND (expr
, 0);
993 tree op1
= TREE_OPERAND (expr
, 1);
995 /* If it would be profitable to swap the operands, then do so to
996 canonicalize the statement, enabling better optimization.
998 By placing canonicalization of such expressions here we
999 transparently keep statements in canonical form, even
1000 when the statement is modified. */
1001 if (tree_swap_operands_p (op0
, op1
, false))
1003 /* For relationals we need to swap the operands
1004 and change the code. */
1010 TREE_SET_CODE (expr
, swap_tree_comparison (code
));
1011 TREE_OPERAND (expr
, 0) = op1
;
1012 TREE_OPERAND (expr
, 1) = op0
;
1015 /* For a commutative operator we can just swap the operands. */
1016 else if (commutative_tree_code (code
))
1018 TREE_OPERAND (expr
, 0) = op1
;
1019 TREE_OPERAND (expr
, 1) = op0
;
1023 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
, prev_vops
);
1024 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), flags
, prev_vops
);
1033 /* Expressions that make no memory references. */
1039 if (class == '2' || class == '<')
1041 if (class == 'c' || class == 't')
1045 /* If we get here, something has gone wrong. */
1046 fprintf (stderr
, "unhandled expression in get_expr_operands():\n");
1048 fputs ("\n", stderr
);
1053 /* Scan operands in ASM_EXPR STMT. PREV_VOPS is as in append_v_may_def and
1057 get_asm_expr_operands (tree stmt
, voperands_t prev_vops
)
1059 int noutputs
= list_length (ASM_OUTPUTS (stmt
));
1060 const char **oconstraints
1061 = (const char **) alloca ((noutputs
) * sizeof (const char *));
1064 const char *constraint
;
1065 bool allows_mem
, allows_reg
, is_inout
;
1066 stmt_ann_t s_ann
= stmt_ann (stmt
);
1068 for (i
=0, link
= ASM_OUTPUTS (stmt
); link
; ++i
, link
= TREE_CHAIN (link
))
1070 oconstraints
[i
] = constraint
1071 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1072 parse_output_constraint (&constraint
, i
, 0, 0,
1073 &allows_mem
, &allows_reg
, &is_inout
);
1075 #if defined ENABLE_CHECKING
1076 /* This should have been split in gimplify_asm_expr. */
1077 if (allows_reg
&& is_inout
)
1081 /* Memory operands are addressable. Note that STMT needs the
1082 address of this operand. */
1083 if (!allows_reg
&& allows_mem
)
1085 tree t
= get_base_address (TREE_VALUE (link
));
1086 if (t
&& DECL_P (t
))
1087 note_addressable (t
, s_ann
);
1090 get_expr_operands (stmt
, &TREE_VALUE (link
), opf_is_def
, prev_vops
);
1093 for (link
= ASM_INPUTS (stmt
); link
; link
= TREE_CHAIN (link
))
1096 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1097 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
1098 oconstraints
, &allows_mem
, &allows_reg
);
1100 /* Memory operands are addressable. Note that STMT needs the
1101 address of this operand. */
1102 if (!allows_reg
&& allows_mem
)
1104 tree t
= get_base_address (TREE_VALUE (link
));
1105 if (t
&& DECL_P (t
))
1106 note_addressable (t
, s_ann
);
1109 get_expr_operands (stmt
, &TREE_VALUE (link
), 0, prev_vops
);
1113 /* Clobber memory for asm ("" : : : "memory"); */
1114 for (link
= ASM_CLOBBERS (stmt
); link
; link
= TREE_CHAIN (link
))
1115 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link
)), "memory") == 0)
1119 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1120 decided to group them). */
1122 add_stmt_operand (&global_var
, stmt
, opf_is_def
, prev_vops
);
1124 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars
, 0, i
,
1126 tree var
= referenced_var (i
);
1127 add_stmt_operand (&var
, stmt
, opf_is_def
, prev_vops
);
1130 /* Now clobber all addressables. */
1131 EXECUTE_IF_SET_IN_BITMAP (addressable_vars
, 0, i
,
1133 tree var
= referenced_var (i
);
1134 add_stmt_operand (&var
, stmt
, opf_is_def
, prev_vops
);
1137 /* If we don't have call-clobbered nor addressable vars and we
1138 still have not computed aliasing information, just mark the
1139 statement as having volatile operands. If the alias pass
1140 finds some, we will add them at that point. */
1141 if (!aliases_computed_p
)
1142 stmt_ann (stmt
)->has_volatile_ops
= true;
1148 /* A subroutine of get_expr_operands to handle INDIRECT_REF. */
1151 get_indirect_ref_operands (tree stmt
, tree expr
, int flags
,
1152 voperands_t prev_vops
)
1154 tree
*pptr
= &TREE_OPERAND (expr
, 0);
1157 if (SSA_VAR_P (ptr
))
1159 if (!aliases_computed_p
)
1161 /* If the pointer does not have a memory tag and aliases have not
1162 been computed yet, mark the statement as having volatile
1163 operands to prevent DOM from entering it in equivalence tables
1164 and DCE from killing it. */
1165 stmt_ann (stmt
)->has_volatile_ops
= true;
1169 struct ptr_info_def
*pi
= NULL
;
1171 /* If we have computed aliasing already, check if PTR has
1172 flow-sensitive points-to information. */
1173 if (TREE_CODE (ptr
) == SSA_NAME
1174 && (pi
= SSA_NAME_PTR_INFO (ptr
)) != NULL
1175 && pi
->name_mem_tag
)
1177 /* PTR has its own memory tag. Use it. */
1178 add_stmt_operand (&pi
->name_mem_tag
, stmt
, flags
, prev_vops
);
1182 /* If PTR is not an SSA_NAME or it doesn't have a name
1183 tag, use its type memory tag. */
1186 /* If we are emitting debugging dumps, display a warning if
1187 PTR is an SSA_NAME with no flow-sensitive alias
1188 information. That means that we may need to compute
1191 && TREE_CODE (ptr
) == SSA_NAME
1195 "NOTE: no flow-sensitive alias info for ");
1196 print_generic_expr (dump_file
, ptr
, dump_flags
);
1197 fprintf (dump_file
, " in ");
1198 print_generic_stmt (dump_file
, stmt
, dump_flags
);
1201 if (TREE_CODE (ptr
) == SSA_NAME
)
1202 ptr
= SSA_NAME_VAR (ptr
);
1203 ann
= var_ann (ptr
);
1204 add_stmt_operand (&ann
->type_mem_tag
, stmt
, flags
, prev_vops
);
1209 /* If a constant is used as a pointer, we can't generate a real
1210 operand for it but we mark the statement volatile to prevent
1211 optimizations from messing things up. */
1212 else if (TREE_CODE (ptr
) == INTEGER_CST
)
1214 stmt_ann (stmt
)->has_volatile_ops
= true;
1218 /* Everything else *should* have been folded elsewhere, but users
1219 are smarter than we in finding ways to write invalid code. We
1220 cannot just abort here. If we were absolutely certain that we
1221 do handle all valid cases, then we could just do nothing here.
1222 That seems optimistic, so attempt to do something logical... */
1223 else if ((TREE_CODE (ptr
) == PLUS_EXPR
|| TREE_CODE (ptr
) == MINUS_EXPR
)
1224 && TREE_CODE (TREE_OPERAND (ptr
, 0)) == ADDR_EXPR
1225 && TREE_CODE (TREE_OPERAND (ptr
, 1)) == INTEGER_CST
)
1227 /* Make sure we know the object is addressable. */
1228 pptr
= &TREE_OPERAND (ptr
, 0);
1229 add_stmt_operand (pptr
, stmt
, 0, NULL
);
1231 /* Mark the object itself with a VUSE. */
1232 pptr
= &TREE_OPERAND (*pptr
, 0);
1233 get_expr_operands (stmt
, pptr
, flags
, prev_vops
);
1237 /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
1241 /* Add a USE operand for the base pointer. */
1242 get_expr_operands (stmt
, pptr
, opf_none
, prev_vops
);
1245 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1248 get_call_expr_operands (tree stmt
, tree expr
, voperands_t prev_vops
)
1251 int call_flags
= call_expr_flags (expr
);
1253 /* Find uses in the called function. */
1254 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), opf_none
, prev_vops
);
1256 for (op
= TREE_OPERAND (expr
, 1); op
; op
= TREE_CHAIN (op
))
1257 get_expr_operands (stmt
, &TREE_VALUE (op
), opf_none
, prev_vops
);
1259 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_none
, prev_vops
);
1261 if (bitmap_first_set_bit (call_clobbered_vars
) >= 0)
1263 /* A 'pure' or a 'const' functions never call clobber anything.
1264 A 'noreturn' function might, but since we don't return anyway
1265 there is no point in recording that. */
1267 & (ECF_PURE
| ECF_CONST
| ECF_NORETURN
)))
1268 add_call_clobber_ops (stmt
, prev_vops
);
1269 else if (!(call_flags
& (ECF_CONST
| ECF_NORETURN
)))
1270 add_call_read_ops (stmt
, prev_vops
);
1272 else if (!aliases_computed_p
)
1273 stmt_ann (stmt
)->has_volatile_ops
= true;
1277 /* Add *VAR_P to the appropriate operand array of STMT. FLAGS is as in
1278 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1279 the statement's real operands, otherwise it is added to virtual
1282 PREV_VOPS is used when adding virtual operands to statements that
1283 already had them (See append_v_may_def and append_vuse). */
1286 add_stmt_operand (tree
*var_p
, tree stmt
, int flags
, voperands_t prev_vops
)
1296 s_ann
= stmt_ann (stmt
);
1298 /* If the operand is an ADDR_EXPR, add its operand to the list of
1299 variables that have had their address taken in this statement. */
1300 if (TREE_CODE (var
) == ADDR_EXPR
)
1302 note_addressable (TREE_OPERAND (var
, 0), s_ann
);
1306 /* If the original variable is not a scalar, it will be added to the list
1307 of virtual operands. In that case, use its base symbol as the virtual
1308 variable representing it. */
1309 is_real_op
= is_gimple_reg (var
);
1310 if (!is_real_op
&& !DECL_P (var
))
1311 var
= get_virtual_var (var
);
1313 /* If VAR is not a variable that we care to optimize, do nothing. */
1314 if (var
== NULL_TREE
|| !SSA_VAR_P (var
))
1317 sym
= (TREE_CODE (var
) == SSA_NAME
? SSA_NAME_VAR (var
) : var
);
1318 v_ann
= var_ann (sym
);
1320 /* Don't expose volatile variables to the optimizers. */
1321 if (TREE_THIS_VOLATILE (sym
))
1323 s_ann
->has_volatile_ops
= true;
1329 /* The variable is a GIMPLE register. Add it to real operands. */
1330 if (flags
& opf_is_def
)
1331 append_def (var_p
, stmt
);
1333 append_use (var_p
, stmt
);
1337 varray_type aliases
;
1339 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1340 virtual operands, unless the caller has specifically requested
1341 not to add virtual operands (used when adding operands inside an
1342 ADDR_EXPR expression). */
1343 if (flags
& opf_no_vops
)
1346 aliases
= v_ann
->may_aliases
;
1348 /* If alias information hasn't been computed yet, then
1349 addressable variables will not be an alias tag nor will they
1350 have aliases. In this case, mark the statement as having
1351 volatile operands. */
1352 if (!aliases_computed_p
&& may_be_aliased (var
))
1353 s_ann
->has_volatile_ops
= true;
1355 if (aliases
== NULL
)
1357 /* The variable is not aliased or it is an alias tag. */
1358 if (flags
& opf_is_def
)
1360 if (v_ann
->is_alias_tag
)
1362 /* Alias tagged vars get regular V_MAY_DEF */
1363 s_ann
->makes_aliased_stores
= 1;
1364 append_v_may_def (var
, stmt
, prev_vops
);
1366 else if ((flags
& opf_kill_def
)
1367 && v_ann
->mem_tag_kind
== NOT_A_TAG
)
1368 /* V_MUST_DEF for non-aliased non-GIMPLE register
1369 variable definitions. Avoid memory tags. */
1370 append_v_must_def (var
, stmt
, prev_vops
);
1372 /* Call-clobbered variables & memory tags get
1374 append_v_may_def (var
, stmt
, prev_vops
);
1378 append_vuse (var
, stmt
, prev_vops
);
1379 if (v_ann
->is_alias_tag
)
1380 s_ann
->makes_aliased_loads
= 1;
1387 /* The variable is aliased. Add its aliases to the virtual
1389 if (VARRAY_ACTIVE_SIZE (aliases
) == 0)
1392 if (flags
& opf_is_def
)
1394 /* If the variable is also an alias tag, add a virtual
1395 operand for it, otherwise we will miss representing
1396 references to the members of the variable's alias set.
1397 This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
1398 if (v_ann
->is_alias_tag
)
1399 append_v_may_def (var
, stmt
, prev_vops
);
1401 for (i
= 0; i
< VARRAY_ACTIVE_SIZE (aliases
); i
++)
1402 append_v_may_def (VARRAY_TREE (aliases
, i
), stmt
, prev_vops
);
1404 s_ann
->makes_aliased_stores
= 1;
1408 if (v_ann
->is_alias_tag
)
1409 append_vuse (var
, stmt
, prev_vops
);
1411 for (i
= 0; i
< VARRAY_ACTIVE_SIZE (aliases
); i
++)
1412 append_vuse (VARRAY_TREE (aliases
, i
), stmt
, prev_vops
);
1414 s_ann
->makes_aliased_loads
= 1;
1420 /* Record that VAR had its address taken in the statement with annotations
1424 note_addressable (tree var
, stmt_ann_t s_ann
)
1426 var
= get_base_address (var
);
1427 if (var
&& SSA_VAR_P (var
))
1429 if (s_ann
->addresses_taken
== NULL
)
1430 s_ann
->addresses_taken
= BITMAP_GGC_ALLOC ();
1431 bitmap_set_bit (s_ann
->addresses_taken
, var_ann (var
)->uid
);
1436 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1437 clobbered variables in the function. */
1440 add_call_clobber_ops (tree stmt
, voperands_t prev_vops
)
1442 /* Functions that are not const, pure or never return may clobber
1443 call-clobbered variables. */
1444 stmt_ann (stmt
)->makes_clobbering_call
= true;
1446 /* If we had created .GLOBAL_VAR earlier, use it. Otherwise, add
1447 a V_MAY_DEF operand for every call clobbered variable. See
1448 compute_may_aliases for the heuristic used to decide whether
1449 to create .GLOBAL_VAR or not. */
1451 add_stmt_operand (&global_var
, stmt
, opf_is_def
, prev_vops
);
1456 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars
, 0, i
,
1458 tree var
= referenced_var (i
);
1460 /* If VAR is read-only, don't add a V_MAY_DEF, just a
1462 if (!TREE_READONLY (var
))
1463 add_stmt_operand (&var
, stmt
, opf_is_def
, prev_vops
);
1465 add_stmt_operand (&var
, stmt
, opf_none
, prev_vops
);
1471 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1475 add_call_read_ops (tree stmt
, voperands_t prev_vops
)
1477 /* Otherwise, if the function is not pure, it may reference memory. Add
1478 a VUSE for .GLOBAL_VAR if it has been created. Otherwise, add a VUSE
1479 for each call-clobbered variable. See add_referenced_var for the
1480 heuristic used to decide whether to create .GLOBAL_VAR. */
1482 add_stmt_operand (&global_var
, stmt
, opf_none
, prev_vops
);
1487 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars
, 0, i
,
1489 tree var
= referenced_var (i
);
1490 add_stmt_operand (&var
, stmt
, opf_none
, prev_vops
);
1495 /* Copies virtual operands from SRC to DST. */
1498 copy_virtual_operands (tree dst
, tree src
)
1500 vuse_optype vuses
= STMT_VUSE_OPS (src
);
1501 v_may_def_optype v_may_defs
= STMT_V_MAY_DEF_OPS (src
);
1502 v_must_def_optype v_must_defs
= STMT_V_MUST_DEF_OPS (src
);
1503 vuse_optype
*vuses_new
= &stmt_ann (dst
)->vuse_ops
;
1504 v_may_def_optype
*v_may_defs_new
= &stmt_ann (dst
)->v_may_def_ops
;
1505 v_must_def_optype
*v_must_defs_new
= &stmt_ann (dst
)->v_must_def_ops
;
1510 *vuses_new
= allocate_vuse_optype (NUM_VUSES (vuses
));
1511 for (i
= 0; i
< NUM_VUSES (vuses
); i
++)
1512 SET_VUSE_OP (*vuses_new
, i
, VUSE_OP (vuses
, i
));
1517 *v_may_defs_new
= allocate_v_may_def_optype (NUM_V_MAY_DEFS (v_may_defs
));
1518 for (i
= 0; i
< NUM_V_MAY_DEFS (v_may_defs
); i
++)
1520 SET_V_MAY_DEF_OP (*v_may_defs_new
, i
, V_MAY_DEF_OP (v_may_defs
, i
));
1521 SET_V_MAY_DEF_RESULT (*v_may_defs_new
, i
,
1522 V_MAY_DEF_RESULT (v_may_defs
, i
));
1528 *v_must_defs_new
= allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs
));
1529 for (i
= 0; i
< NUM_V_MUST_DEFS (v_must_defs
); i
++)
1530 SET_V_MUST_DEF_OP (*v_must_defs_new
, i
, V_MUST_DEF_OP (v_must_defs
, i
));
1534 #include "gt-tree-ssa-operands.h"