1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may, or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
53 The operand tree is then parsed by the various get_* routines which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had an SSA_NAME version of a
72 variable, and that same variable occurs in the new operands cache, then
73 the new cache vector will also get the same SSA_NAME.
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'.
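   As an illustrative example (hypothetical statement, not from a real dump),
   an assignment such as

     a_5 = b_3 + c_1;

   is cached with one real DEF pointing at the LHS 'a_5' and real USEs
   pointing at 'b_3' and 'c_1', all of them pointers into the statement tree.
   A store through a pointer, e.g. '*p_2 = x_4', additionally receives
   virtual operands (V_MAY_DEFs and VUSEs) for whatever 'p_2' may point to;
   those live only in the cache, not in the statement tree.  */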
82 /* Flags to describe operand properties in helpers. */
84 /* By default, operands are loaded. */
85 #define opf_none 0
87 /* Operand is the target of an assignment expression or a
88 call-clobbered variable. */
89 #define opf_is_def (1 << 0)
91 /* Operand is the target of an assignment expression. */
92 #define opf_kill_def (1 << 1)
94 /* No virtual operands should be created in the expression. This is used
95 when traversing ADDR_EXPR nodes which have different semantics than
96 other expressions. Inside an ADDR_EXPR node, the only operands that we
97 need to consider are indices into arrays. For instance, &a.b[i] should
98 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
99 VUSE for 'b'. */
100 #define opf_no_vops (1 << 2)
102 /* Operand is a "non-specific" kill for call-clobbers and such. This is used
103 to distinguish "reset the world" events from explicit MODIFY_EXPRs. */
104 #define opf_non_specific (1 << 3)
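/* Illustrative use of these flags (hypothetical statement): for the
   assignment 'a.b[i] = x_2', the LHS is scanned with opf_is_def but without
   opf_kill_def, since only part of 'a' is modified, while 'i' and 'x_2' are
   scanned with opf_none and become plain USEs.  */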
107 /* Array for building all the def operands. */
108 static VEC(tree,heap) *build_defs;
110 /* Array for building all the use operands. */
111 static VEC(tree,heap) *build_uses;
113 /* Array for building all the v_may_def operands. */
114 static VEC(tree,heap) *build_v_may_defs;
116 /* Array for building all the vuse operands. */
117 static VEC(tree,heap) *build_vuses;
119 /* Array for building all the v_must_def operands. */
120 static VEC(tree,heap) *build_v_must_defs;
122 /* True if the operands for call clobbered vars are cached and valid. */
123 bool ssa_call_clobbered_cache_valid;
124 bool ssa_ro_call_cache_valid;
126 /* These arrays are the cached operand vectors for call clobbered calls. */
127 static VEC(tree,heap) *clobbered_v_may_defs;
128 static VEC(tree,heap) *clobbered_vuses;
129 static VEC(tree,heap) *ro_call_vuses;
130 static bool ops_active = false;
132 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
133 static unsigned operand_memory_index;
135 static void get_expr_operands (tree, tree *, int);
136 static void get_asm_expr_operands (tree);
137 static void get_indirect_ref_operands (tree, tree, int);
138 static void get_tmr_operands (tree, tree, int);
139 static void get_call_expr_operands (tree, tree);
140 static inline void append_def (tree *);
141 static inline void append_use (tree *);
142 static void append_v_may_def (tree);
143 static void append_v_must_def (tree);
144 static void add_call_clobber_ops (tree, tree);
145 static void add_call_read_ops (tree);
146 static void add_stmt_operand (tree *, stmt_ann_t, int);
147 static void build_ssa_operands (tree stmt);
149 static def_optype_p free_defs = NULL;
150 static use_optype_p free_uses = NULL;
151 static vuse_optype_p free_vuses = NULL;
152 static maydef_optype_p free_maydefs = NULL;
153 static mustdef_optype_p free_mustdefs = NULL;
156 /* Return the DECL_UID of the base variable of T. */
158 static inline unsigned
159 get_name_decl (tree t)
161 if (TREE_CODE (t) != SSA_NAME)
162 return DECL_UID (t);
163 else
164 return DECL_UID (SSA_NAME_VAR (t));
167 /* Comparison function for qsort used in operand_build_sort_virtual. */
169 static int
170 operand_build_cmp (const void *p, const void *q)
172 tree e1 = *((const tree *)p);
173 tree e2 = *((const tree *)q);
174 unsigned int u1,u2;
176 u1 = get_name_decl (e1);
177 u2 = get_name_decl (e2);
179 /* We want to sort in ascending order. They can never be equal. */
180 #ifdef ENABLE_CHECKING
181 gcc_assert (u1 != u2);
182 #endif
183 return (u1 > u2 ? 1 : -1);
186 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
188 static inline void
189 operand_build_sort_virtual (VEC(tree,heap) *list)
191 int num = VEC_length (tree, list);
192 if (num < 2)
193 return;
194 if (num == 2)
196 if (get_name_decl (VEC_index (tree, list, 0))
197 > get_name_decl (VEC_index (tree, list, 1)))
199 /* Swap elements if in the wrong order. */
200 tree tmp = VEC_index (tree, list, 0);
201 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
202 VEC_replace (tree, list, 1, tmp);
204 return;
206 /* There are 3 or more elements, call qsort. */
207 qsort (VEC_address (tree, list),
208 VEC_length (tree, list),
209 sizeof (tree),
210 operand_build_cmp);
215 /* Return true if the ssa operands cache is active. */
217 bool
218 ssa_operands_active (void)
220 return ops_active;
224 /* Initialize the operand cache routines. */
226 void
227 init_ssa_operands (void)
229 build_defs = VEC_alloc (tree, heap, 5);
230 build_uses = VEC_alloc (tree, heap, 10);
231 build_vuses = VEC_alloc (tree, heap, 25);
232 build_v_may_defs = VEC_alloc (tree, heap, 25);
233 build_v_must_defs = VEC_alloc (tree, heap, 25);
235 gcc_assert (operand_memory == NULL);
236 operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
237 ops_active = true;
241 /* Dispose of anything required by the operand routines. */
243 void
244 fini_ssa_operands (void)
246 struct ssa_operand_memory_d *ptr;
247 VEC_free (tree, heap, build_defs);
248 VEC_free (tree, heap, build_uses);
249 VEC_free (tree, heap, build_v_must_defs);
250 VEC_free (tree, heap, build_v_may_defs);
251 VEC_free (tree, heap, build_vuses);
252 free_defs = NULL;
253 free_uses = NULL;
254 free_vuses = NULL;
255 free_maydefs = NULL;
256 free_mustdefs = NULL;
257 while ((ptr = operand_memory) != NULL)
259 operand_memory = operand_memory->next;
260 ggc_free (ptr);
263 VEC_free (tree, heap, clobbered_v_may_defs);
264 VEC_free (tree, heap, clobbered_vuses);
265 VEC_free (tree, heap, ro_call_vuses);
266 ops_active = false;
270 /* Return memory for operands of SIZE chunks. */
272 static inline void *
273 ssa_operand_alloc (unsigned size)
275 char *ptr;
276 if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
278 struct ssa_operand_memory_d *ptr;
279 ptr = GGC_NEW (struct ssa_operand_memory_d);
280 ptr->next = operand_memory;
281 operand_memory = ptr;
282 operand_memory_index = 0;
284 ptr = &(operand_memory->mem[operand_memory_index]);
285 operand_memory_index += size;
286 return ptr;
290 /* Make sure PTR is in the correct immediate use list. Since uses are simply
291 pointers into the stmt TREE, there is no way of telling if anyone has
292 changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
293 The contents are different, but the pointer is still the same. This
294 routine will check to make sure PTR is in the correct list, and if it isn't
295 put it in the correct list. We cannot simply check the previous node
296 because all nodes in the same stmt might have been changed. */
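/* For instance (illustrative): if fold_stmt changed TREE_OPERAND (stmt, 1)
   from 'b_3' to 'c_7', the cached use_operand_p still points at the same
   address but now dereferences to 'c_7'.  It therefore has to be moved from
   the immediate-use list of 'b_3' to the list of 'c_7', which is what the
   relinking below does.  */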
298 static inline void
299 correct_use_link (use_operand_p ptr, tree stmt)
301 use_operand_p prev;
302 tree root;
304 /* Fold_stmt () may have changed the stmt pointers. */
305 if (ptr->stmt != stmt)
306 ptr->stmt = stmt;
308 prev = ptr->prev;
309 if (prev)
311 /* Find the root element, making sure we skip any safe iterators. */
312 while (prev->use != NULL || prev->stmt == NULL)
313 prev = prev->prev;
315 /* Get the ssa_name of the list the node is in. */
316 root = prev->stmt;
317 /* If it's the right list, simply return. */
318 if (root == *(ptr->use))
319 return;
321 /* It's in the wrong list if we reach here. */
322 delink_imm_use (ptr);
323 link_imm_use (ptr, *(ptr->use));
327 /* This routine makes sure that PTR is in an immediate use list, and makes
328 sure the stmt pointer is set to the current stmt. Virtual uses do not need
329 the overhead of correct_use_link since they cannot be directly manipulated
330 like a real use can be. (They don't exist in the TREE_OPERAND nodes.) */
331 static inline void
332 set_virtual_use_link (use_operand_p ptr, tree stmt)
334 /* Fold_stmt () may have changed the stmt pointers. */
335 if (ptr->stmt != stmt)
336 ptr->stmt = stmt;
338 /* If this use isn't in a list, add it to the correct list. */
339 if (!ptr->prev)
340 link_imm_use (ptr, *(ptr->use));
345 #define FINALIZE_OPBUILD build_defs
346 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
347 build_defs, (I))
348 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
349 build_defs, (I))
350 #define FINALIZE_FUNC finalize_ssa_def_ops
351 #define FINALIZE_ALLOC alloc_def
352 #define FINALIZE_FREE free_defs
353 #define FINALIZE_TYPE struct def_optype_d
354 #define FINALIZE_ELEM(PTR) ((PTR)->def_ptr)
355 #define FINALIZE_OPS DEF_OPS
356 #define FINALIZE_BASE(VAR) VAR
357 #define FINALIZE_BASE_TYPE tree *
358 #define FINALIZE_BASE_ZERO NULL
359 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL)
360 #include "tree-ssa-opfinalize.h"
363 /* This routine will create stmt operands for STMT from the def build list. */
365 static void
366 finalize_ssa_defs (tree stmt)
368 unsigned int num = VEC_length (tree, build_defs);
369 /* There should only be a single real definition per assignment. */
370 gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
372 /* If there is an old list, often the new list is identical, or close, so
373 find the elements at the beginning that are the same as the vector. */
375 finalize_ssa_def_ops (stmt);
376 VEC_truncate (tree, build_defs, 0);
379 #define FINALIZE_OPBUILD build_uses
380 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
381 build_uses, (I))
382 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
383 build_uses, (I))
384 #define FINALIZE_FUNC finalize_ssa_use_ops
385 #define FINALIZE_ALLOC alloc_use
386 #define FINALIZE_FREE free_uses
387 #define FINALIZE_TYPE struct use_optype_d
388 #define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use)
389 #define FINALIZE_OPS USE_OPS
390 #define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR)
391 #define FINALIZE_CORRECT_USE correct_use_link
392 #define FINALIZE_BASE(VAR) VAR
393 #define FINALIZE_BASE_TYPE tree *
394 #define FINALIZE_BASE_ZERO NULL
395 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
396 (PTR)->use_ptr.use = (VAL); \
397 link_imm_use_stmt (&((PTR)->use_ptr), \
398 *(VAL), (STMT))
399 #include "tree-ssa-opfinalize.h"
401 /* Return a new use operand vector for STMT, comparing to OLD_OPS_P. */
403 static void
404 finalize_ssa_uses (tree stmt)
406 #ifdef ENABLE_CHECKING
408 unsigned x;
409 unsigned num = VEC_length (tree, build_uses);
411 /* If the pointer to the operand is the statement itself, something is
412 wrong. It means that we are pointing to a local variable (the
413 initial call to get_stmt_operands does not pass a pointer to a
414 statement). */
415 for (x = 0; x < num; x++)
416 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
418 #endif
419 finalize_ssa_use_ops (stmt);
420 VEC_truncate (tree, build_uses, 0);
424 /* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P. */
425 #define FINALIZE_OPBUILD build_v_may_defs
426 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_may_defs, (I))
427 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
428 build_v_may_defs, (I)))
429 #define FINALIZE_FUNC finalize_ssa_v_may_def_ops
430 #define FINALIZE_ALLOC alloc_maydef
431 #define FINALIZE_FREE free_maydefs
432 #define FINALIZE_TYPE struct maydef_optype_d
433 #define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR)
434 #define FINALIZE_OPS MAYDEF_OPS
435 #define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR)
436 #define FINALIZE_CORRECT_USE set_virtual_use_link
437 #define FINALIZE_BASE_ZERO 0
438 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
439 #define FINALIZE_BASE_TYPE unsigned
440 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
441 (PTR)->def_var = (VAL); \
442 (PTR)->use_var = (VAL); \
443 (PTR)->use_ptr.use = &((PTR)->use_var); \
444 link_imm_use_stmt (&((PTR)->use_ptr), \
445 (VAL), (STMT))
446 #include "tree-ssa-opfinalize.h"
449 static void
450 finalize_ssa_v_may_defs (tree stmt)
452 finalize_ssa_v_may_def_ops (stmt);
456 /* Clear the in_list bits and empty the build array for v_may_defs. */
458 static inline void
459 cleanup_v_may_defs (void)
461 unsigned x, num;
462 num = VEC_length (tree, build_v_may_defs);
464 for (x = 0; x < num; x++)
466 tree t = VEC_index (tree, build_v_may_defs, x);
467 if (TREE_CODE (t) != SSA_NAME)
469 var_ann_t ann = var_ann (t);
470 ann->in_v_may_def_list = 0;
473 VEC_truncate (tree, build_v_may_defs, 0);
477 #define FINALIZE_OPBUILD build_vuses
478 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_vuses, (I))
479 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
480 build_vuses, (I)))
481 #define FINALIZE_FUNC finalize_ssa_vuse_ops
482 #define FINALIZE_ALLOC alloc_vuse
483 #define FINALIZE_FREE free_vuses
484 #define FINALIZE_TYPE struct vuse_optype_d
485 #define FINALIZE_ELEM(PTR) VUSE_OP (PTR)
486 #define FINALIZE_OPS VUSE_OPS
487 #define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR)
488 #define FINALIZE_CORRECT_USE set_virtual_use_link
489 #define FINALIZE_BASE_ZERO 0
490 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
491 #define FINALIZE_BASE_TYPE unsigned
492 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
493 (PTR)->use_var = (VAL); \
494 (PTR)->use_ptr.use = &((PTR)->use_var); \
495 link_imm_use_stmt (&((PTR)->use_ptr), \
496 (VAL), (STMT))
497 #include "tree-ssa-opfinalize.h"
500 /* Return a new vuse operand vector, comparing to OLD_OPS_P. */
502 static void
503 finalize_ssa_vuses (tree stmt)
505 unsigned num, num_v_may_defs;
506 unsigned vuse_index;
508 /* Remove superfluous VUSE operands. If the statement already has a
509 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
510 needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
511 suppose that variable 'a' is aliased:
513 # VUSE <a_2>
514 # a_3 = V_MAY_DEF <a_2>
515 a = a + 1;
517 The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
518 operation. */
520 num = VEC_length (tree, build_vuses);
521 num_v_may_defs = VEC_length (tree, build_v_may_defs);
523 if (num > 0 && num_v_may_defs > 0)
525 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
527 tree vuse;
528 vuse = VEC_index (tree, build_vuses, vuse_index);
529 if (TREE_CODE (vuse) != SSA_NAME)
531 var_ann_t ann = var_ann (vuse);
532 ann->in_vuse_list = 0;
533 if (ann->in_v_may_def_list)
535 VEC_ordered_remove (tree, build_vuses, vuse_index);
536 continue;
539 vuse_index++;
542 else
543 /* Clear out the in_list bits. */
544 for (vuse_index = 0;
545 vuse_index < VEC_length (tree, build_vuses);
546 vuse_index++)
548 tree t = VEC_index (tree, build_vuses, vuse_index);
549 if (TREE_CODE (t) != SSA_NAME)
551 var_ann_t ann = var_ann (t);
552 ann->in_vuse_list = 0;
556 finalize_ssa_vuse_ops (stmt);
557 /* The v_may_def build vector wasn't cleaned up because we needed it. */
558 cleanup_v_may_defs ();
560 /* Free the vuses build vector. */
561 VEC_truncate (tree, build_vuses, 0);
565 /* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P. */
567 #define FINALIZE_OPBUILD build_v_must_defs
568 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_must_defs, (I))
569 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
570 build_v_must_defs, (I)))
571 #define FINALIZE_FUNC finalize_ssa_v_must_def_ops
572 #define FINALIZE_ALLOC alloc_mustdef
573 #define FINALIZE_FREE free_mustdefs
574 #define FINALIZE_TYPE struct mustdef_optype_d
575 #define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR)
576 #define FINALIZE_OPS MUSTDEF_OPS
577 #define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR)
578 #define FINALIZE_CORRECT_USE set_virtual_use_link
579 #define FINALIZE_BASE_ZERO 0
580 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
581 #define FINALIZE_BASE_TYPE unsigned
582 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
583 (PTR)->def_var = (VAL); \
584 (PTR)->kill_var = (VAL); \
585 (PTR)->use_ptr.use = &((PTR)->kill_var);\
586 link_imm_use_stmt (&((PTR)->use_ptr), \
587 (VAL), (STMT))
588 #include "tree-ssa-opfinalize.h"
591 static void
592 finalize_ssa_v_must_defs (tree stmt)
594 /* In the presence of subvars, there may be more than one V_MUST_DEF per
595 statement (one for each subvar). It is a bit expensive to verify that
596 all must-defs in a statement belong to subvars if there is more than one
597 MUST-def, so we don't do it. Suffice to say, if you reach here without
598 having subvars, and have num >1, you have hit a bug. */
600 finalize_ssa_v_must_def_ops (stmt);
601 VEC_truncate (tree, build_v_must_defs, 0);
605 /* Finalize all the build vectors, fill the new ones into INFO. */
607 static inline void
608 finalize_ssa_stmt_operands (tree stmt)
610 finalize_ssa_defs (stmt);
611 finalize_ssa_uses (stmt);
612 finalize_ssa_v_must_defs (stmt);
613 finalize_ssa_v_may_defs (stmt);
614 finalize_ssa_vuses (stmt);
618 /* Start the process of building up operands vectors in INFO. */
620 static inline void
621 start_ssa_stmt_operands (void)
623 gcc_assert (VEC_length (tree, build_defs) == 0);
624 gcc_assert (VEC_length (tree, build_uses) == 0);
625 gcc_assert (VEC_length (tree, build_vuses) == 0);
626 gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
627 gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
631 /* Add DEF_P to the list of pointers to operands. */
633 static inline void
634 append_def (tree *def_p)
636 VEC_safe_push (tree, heap, build_defs, (tree)def_p);
640 /* Add USE_P to the list of pointers to operands. */
642 static inline void
643 append_use (tree *use_p)
645 VEC_safe_push (tree, heap, build_uses, (tree)use_p);
649 /* Add a new virtual may def for variable VAR to the build array. */
651 static inline void
652 append_v_may_def (tree var)
654 if (TREE_CODE (var) != SSA_NAME)
656 var_ann_t ann = get_var_ann (var);
658 /* Don't allow duplicate entries. */
659 if (ann->in_v_may_def_list)
660 return;
661 ann->in_v_may_def_list = 1;
664 VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
668 /* Add VAR to the list of virtual uses. */
670 static inline void
671 append_vuse (tree var)
674 /* Don't allow duplicate entries. */
675 if (TREE_CODE (var) != SSA_NAME)
677 var_ann_t ann = get_var_ann (var);
679 if (ann->in_vuse_list || ann->in_v_may_def_list)
680 return;
681 ann->in_vuse_list = 1;
684 VEC_safe_push (tree, heap, build_vuses, (tree)var);
688 /* Add VAR to the list of virtual must definitions for INFO. */
690 static inline void
691 append_v_must_def (tree var)
693 unsigned i;
695 /* Don't allow duplicate entries. */
696 for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
697 if (var == VEC_index (tree, build_v_must_defs, i))
698 return;
700 VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
704 /* Parse STMT looking for operands. OLD_OPS is the original stmt operand
705 cache for STMT, if it existed before. When finished, the various build_*
706 operand vectors will have potential operands in them. */
708 static void
709 parse_ssa_operands (tree stmt)
711 enum tree_code code;
713 code = TREE_CODE (stmt);
714 switch (code)
716 case MODIFY_EXPR:
717 /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
718 either only part of LHS is modified or if the RHS might throw,
719 otherwise, use V_MUST_DEF.
721 ??? If it might throw, we should represent somehow that it is killed
722 on the fallthrough path. */
724 tree lhs = TREE_OPERAND (stmt, 0);
725 int lhs_flags = opf_is_def;
727 get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
729 /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
730 or not the entire LHS is modified; that depends on what's
731 inside the VIEW_CONVERT_EXPR. */
732 if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
733 lhs = TREE_OPERAND (lhs, 0);
735 if (TREE_CODE (lhs) != ARRAY_REF
736 && TREE_CODE (lhs) != ARRAY_RANGE_REF
737 && TREE_CODE (lhs) != BIT_FIELD_REF
738 && TREE_CODE (lhs) != REALPART_EXPR
739 && TREE_CODE (lhs) != IMAGPART_EXPR)
740 lhs_flags |= opf_kill_def;
742 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
744 break;
746 case COND_EXPR:
747 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
748 break;
750 case SWITCH_EXPR:
751 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
752 break;
754 case ASM_EXPR:
755 get_asm_expr_operands (stmt);
756 break;
758 case RETURN_EXPR:
759 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
760 break;
762 case GOTO_EXPR:
763 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
764 break;
766 case LABEL_EXPR:
767 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
768 break;
770 /* These nodes contain no variable references. */
771 case BIND_EXPR:
772 case CASE_LABEL_EXPR:
773 case TRY_CATCH_EXPR:
774 case TRY_FINALLY_EXPR:
775 case EH_FILTER_EXPR:
776 case CATCH_EXPR:
777 case RESX_EXPR:
778 break;
780 default:
781 /* Notice that if get_expr_operands tries to use &STMT as the operand
782 pointer (which may only happen for USE operands), we will fail in
783 append_use. This default will handle statements like empty
784 statements, or CALL_EXPRs that may appear on the RHS of a statement
785 or as statements themselves. */
786 get_expr_operands (stmt, &stmt, opf_none);
787 break;
791 /* Create an operands cache for STMT. */
793 static void
794 build_ssa_operands (tree stmt)
796 stmt_ann_t ann = get_stmt_ann (stmt);
798 /* Initially assume that the statement has no volatile operands. */
799 if (ann)
800 ann->has_volatile_ops = false;
802 start_ssa_stmt_operands ();
804 parse_ssa_operands (stmt);
805 operand_build_sort_virtual (build_vuses);
806 operand_build_sort_virtual (build_v_may_defs);
807 operand_build_sort_virtual (build_v_must_defs);
809 finalize_ssa_stmt_operands (stmt);
813 /* Free any operands vectors in OPS. */
814 void
815 free_ssa_operands (stmt_operands_p ops)
817 ops->def_ops = NULL;
818 ops->use_ops = NULL;
819 ops->maydef_ops = NULL;
820 ops->mustdef_ops = NULL;
821 ops->vuse_ops = NULL;
825 /* Get the operands of statement STMT. Note that repeated calls to
826 get_stmt_operands for the same statement will do nothing until the
827 statement is marked modified by a call to mark_stmt_modified(). */
829 void
830 update_stmt_operands (tree stmt)
832 stmt_ann_t ann = get_stmt_ann (stmt);
833 /* If get_stmt_operands is called before SSA is initialized, don't
834 do anything. */
835 if (!ssa_operands_active ())
836 return;
837 /* The optimizers cannot handle statements that are nothing but a
838 _DECL. This indicates a bug in the gimplifier. */
839 gcc_assert (!SSA_VAR_P (stmt));
841 gcc_assert (ann->modified);
843 timevar_push (TV_TREE_OPS);
845 build_ssa_operands (stmt);
847 /* Clear the modified bit for STMT. Subsequent calls to
848 get_stmt_operands for this statement will do nothing until the
849 statement is marked modified by a call to mark_stmt_modified(). */
850 ann->modified = 0;
852 timevar_pop (TV_TREE_OPS);
856 /* Copies virtual operands from SRC to DST. */
858 void
859 copy_virtual_operands (tree dest, tree src)
861 tree t;
862 ssa_op_iter iter, old_iter;
863 use_operand_p use_p, u2;
864 def_operand_p def_p, d2;
866 build_ssa_operands (dest);
868 /* Copy all the virtual fields. */
869 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
870 append_vuse (t);
871 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
872 append_v_may_def (t);
873 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
874 append_v_must_def (t);
876 if (VEC_length (tree, build_vuses) == 0
877 && VEC_length (tree, build_v_may_defs) == 0
878 && VEC_length (tree, build_v_must_defs) == 0)
879 return;
881 /* Now commit the virtual operands to this stmt. */
882 finalize_ssa_v_must_defs (dest);
883 finalize_ssa_v_may_defs (dest);
884 finalize_ssa_vuses (dest);
886 /* Finally, set the field to the same values as the originals. */
889 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
890 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
892 gcc_assert (!op_iter_done (&old_iter));
893 SET_USE (use_p, t);
894 t = op_iter_next_tree (&old_iter);
896 gcc_assert (op_iter_done (&old_iter));
898 op_iter_init_maydef (&old_iter, src, &u2, &d2);
899 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
901 gcc_assert (!op_iter_done (&old_iter));
902 SET_USE (use_p, USE_FROM_PTR (u2));
903 SET_DEF (def_p, DEF_FROM_PTR (d2));
904 op_iter_next_maymustdef (&u2, &d2, &old_iter);
906 gcc_assert (op_iter_done (&old_iter));
908 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
909 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
911 gcc_assert (!op_iter_done (&old_iter));
912 SET_USE (use_p, USE_FROM_PTR (u2));
913 SET_DEF (def_p, DEF_FROM_PTR (d2));
914 op_iter_next_maymustdef (&u2, &d2, &old_iter);
916 gcc_assert (op_iter_done (&old_iter));
921 /* Specifically for use in DOM's expression analysis. Given a store, we
922 create an artificial stmt which looks like a load from the store; this can
923 be used to eliminate redundant loads. OLD_STMT is the store statement,
924 and NEW_STMT is the new load which represents a load of the values
925 stored. */
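/* Illustrative example (hypothetical SSA names): given the store

     # a_6 = V_MAY_DEF <a_5>
     a = x_3;

   the artificial load statement receives a VUSE of 'a_6', so a subsequent
   load of 'a' that sees the same virtual operand can be recognized as
   redundant with the stored value 'x_3'.  */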
927 void
928 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
930 stmt_ann_t ann;
931 tree op;
932 ssa_op_iter iter;
933 use_operand_p use_p;
934 unsigned x;
936 ann = get_stmt_ann (new_stmt);
938 /* Process the stmt looking for operands. */
939 start_ssa_stmt_operands ();
940 parse_ssa_operands (new_stmt);
942 for (x = 0; x < VEC_length (tree, build_vuses); x++)
944 tree t = VEC_index (tree, build_vuses, x);
945 if (TREE_CODE (t) != SSA_NAME)
947 var_ann_t ann = var_ann (t);
948 ann->in_vuse_list = 0;
952 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
954 tree t = VEC_index (tree, build_v_may_defs, x);
955 if (TREE_CODE (t) != SSA_NAME)
957 var_ann_t ann = var_ann (t);
958 ann->in_v_may_def_list = 0;
961 /* Remove any virtual operands that were found. */
962 VEC_truncate (tree, build_v_may_defs, 0);
963 VEC_truncate (tree, build_v_must_defs, 0);
964 VEC_truncate (tree, build_vuses, 0);
966 /* For each VDEF on the original statement, we want to create a
967 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
968 statement. */
969 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
970 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
971 append_vuse (op);
973 /* Now build the operands for this new stmt. */
974 finalize_ssa_stmt_operands (new_stmt);
976 /* All uses in this fake stmt must not be in the immediate use lists. */
977 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
978 delink_imm_use (use_p);
981 void
982 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
984 tree op0, op1;
985 op0 = *exp0;
986 op1 = *exp1;
988 /* If the operand cache is active, attempt to preserve the relative positions
989 of these two operands in their respective immediate use lists. */
990 if (ssa_operands_active () && op0 != op1)
992 use_optype_p use0, use1, ptr;
993 use0 = use1 = NULL;
994 /* Find the 2 operands in the cache, if they are there. */
995 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
996 if (USE_OP_PTR (ptr)->use == exp0)
998 use0 = ptr;
999 break;
1001 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1002 if (USE_OP_PTR (ptr)->use == exp1)
1004 use1 = ptr;
1005 break;
1007 /* If both uses don't have operand entries, there isn't much we can do
1008 at this point. Presumably we don't need to worry about it. */
1009 if (use0 && use1)
1011 tree *tmp = USE_OP_PTR (use1)->use;
1012 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
1013 USE_OP_PTR (use0)->use = tmp;
1017 /* Now swap the data. */
1018 *exp0 = op1;
1019 *exp1 = op0;
1022 /* Recursively scan the expression pointed to by EXPR_P in statement referred
1023 to by INFO. FLAGS is one of the OPF_* constants modifying how to interpret
1024 the operands found. */
1026 static void
1027 get_expr_operands (tree stmt, tree *expr_p, int flags)
1029 enum tree_code code;
1030 enum tree_code_class class;
1031 tree expr = *expr_p;
1032 stmt_ann_t s_ann = stmt_ann (stmt);
1034 if (expr == NULL)
1035 return;
1037 code = TREE_CODE (expr);
1038 class = TREE_CODE_CLASS (code);
1040 switch (code)
1042 case ADDR_EXPR:
1043 /* We could have the address of a component, array member,
1044 etc which has interesting variable references. */
1045 /* Taking the address of a variable does not represent a
1046 reference to it, but the fact that the stmt takes its address will be
1047 of interest to some passes (e.g. alias resolution). */
1048 add_stmt_operand (expr_p, s_ann, 0);
1050 /* If the address is invariant, there may be no interesting variable
1051 references inside. */
1052 if (is_gimple_min_invariant (expr))
1053 return;
1055 /* There should be no VUSEs created, since the referenced objects are
1056 not really accessed. The only operands that we should find here
1057 are ARRAY_REF indices which will always be real operands (GIMPLE
1058 does not allow non-registers as array indices). */
1059 flags |= opf_no_vops;
1061 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1062 return;
1064 case SSA_NAME:
1065 case STRUCT_FIELD_TAG:
1066 case TYPE_MEMORY_TAG:
1067 case NAME_MEMORY_TAG:
1068 case VAR_DECL:
1069 case PARM_DECL:
1070 case RESULT_DECL:
1071 case CONST_DECL:
1073 subvar_t svars;
1075 /* Add the subvars for a variable if it has subvars, to DEFS or USES.
1076 Otherwise, add the variable itself.
1077 Whether it goes to USES or DEFS depends on the operand flags. */
1078 if (var_can_have_subvars (expr)
1079 && (svars = get_subvars_for_var (expr)))
1081 subvar_t sv;
1082 for (sv = svars; sv; sv = sv->next)
1083 add_stmt_operand (&sv->var, s_ann, flags);
1085 else
1087 add_stmt_operand (expr_p, s_ann, flags);
1089 return;
1091 case MISALIGNED_INDIRECT_REF:
1092 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1093 /* fall through */
1095 case ALIGN_INDIRECT_REF:
1096 case INDIRECT_REF:
1097 get_indirect_ref_operands (stmt, expr, flags);
1098 return;
1100 case TARGET_MEM_REF:
1101 get_tmr_operands (stmt, expr, flags);
1102 return;
1104 case ARRAY_REF:
1105 case ARRAY_RANGE_REF:
1106 /* Treat array references as references to the virtual variable
1107 representing the array. The virtual variable for an ARRAY_REF
1108 is the VAR_DECL for the array. */
1110 /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
1111 according to the value of IS_DEF. Recurse if the LHS of the
1112 ARRAY_REF node is not a regular variable. */
1113 if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
1114 add_stmt_operand (expr_p, s_ann, flags);
1115 else
1116 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1118 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1119 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1120 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1121 return;
1123 case COMPONENT_REF:
1124 case REALPART_EXPR:
1125 case IMAGPART_EXPR:
1127 tree ref;
1128 HOST_WIDE_INT offset, size, maxsize;
1129 /* This component ref becomes an access to all of the subvariables
1130 it can touch, if we can determine that, but *NOT* the real one.
1131 If we can't determine which fields we could touch, the recursion
1132 will eventually get to a variable and add *all* of its subvars, or
1133 whatever is the minimum correct subset. */
1135 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
1136 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
1138 subvar_t svars = get_subvars_for_var (ref);
1139 subvar_t sv;
1140 for (sv = svars; sv; sv = sv->next)
1142 bool exact;
1143 if (overlap_subvar (offset, maxsize, sv, &exact))
1145 int subvar_flags = flags;
1146 if (!exact
1147 || size != maxsize)
1148 subvar_flags &= ~opf_kill_def;
1149 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1153 else
1154 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1155 flags & ~opf_kill_def);
1157 if (code == COMPONENT_REF)
1159 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1160 s_ann->has_volatile_ops = true;
1161 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1163 return;
1165 case WITH_SIZE_EXPR:
1166 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1167 and an rvalue reference to its second argument. */
1168 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1169 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1170 return;
1172 case CALL_EXPR:
1173 get_call_expr_operands (stmt, expr);
1174 return;
1176 case COND_EXPR:
1177 case VEC_COND_EXPR:
1178 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1179 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1180 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1181 return;
1183 case MODIFY_EXPR:
1185 int subflags;
1186 tree op;
1188 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1190 op = TREE_OPERAND (expr, 0);
1191 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1192 op = TREE_OPERAND (op, 0);
1193 if (TREE_CODE (op) == ARRAY_REF
1194 || TREE_CODE (op) == ARRAY_RANGE_REF
1195 || TREE_CODE (op) == REALPART_EXPR
1196 || TREE_CODE (op) == IMAGPART_EXPR)
1197 subflags = opf_is_def;
1198 else
1199 subflags = opf_is_def | opf_kill_def;
1201 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
1202 return;
1205 case CONSTRUCTOR:
1207 /* General aggregate CONSTRUCTORs have been decomposed, but they
1208 are still in use as the COMPLEX_EXPR equivalent for vectors. */
1209 constructor_elt *ce;
1210 unsigned HOST_WIDE_INT idx;
1212 for (idx = 0;
1213 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
1214 idx++)
1215 get_expr_operands (stmt, &ce->value, opf_none);
1217 return;
1220 case TRUTH_NOT_EXPR:
1221 case BIT_FIELD_REF:
1222 case VIEW_CONVERT_EXPR:
1223 do_unary:
1224 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1225 return;
1227 case TRUTH_AND_EXPR:
1228 case TRUTH_OR_EXPR:
1229 case TRUTH_XOR_EXPR:
1230 case COMPOUND_EXPR:
1231 case OBJ_TYPE_REF:
1232 case ASSERT_EXPR:
1233 do_binary:
1235 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1236 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1237 return;
1240 case REALIGN_LOAD_EXPR:
1242 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1243 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1244 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
1245 return;
1248 case BLOCK:
1249 case FUNCTION_DECL:
1250 case EXC_PTR_EXPR:
1251 case FILTER_EXPR:
1252 case LABEL_DECL:
1253 /* Expressions that make no memory references. */
1254 return;
1256 default:
1257 if (class == tcc_unary)
1258 goto do_unary;
1259 if (class == tcc_binary || class == tcc_comparison)
1260 goto do_binary;
1261 if (class == tcc_constant || class == tcc_type)
1262 return;
1265 /* If we get here, something has gone wrong. */
1266 #ifdef ENABLE_CHECKING
1267 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
1268 debug_tree (expr);
1269 fputs ("\n", stderr);
1270 internal_error ("internal error");
1271 #endif
1272 gcc_unreachable ();
1276 /* Scan operands in the ASM_EXPR stmt referred to in INFO. */
1278 static void
1279 get_asm_expr_operands (tree stmt)
1281 stmt_ann_t s_ann = stmt_ann (stmt);
1282 int noutputs = list_length (ASM_OUTPUTS (stmt));
1283 const char **oconstraints
1284 = (const char **) alloca ((noutputs) * sizeof (const char *));
1285 int i;
1286 tree link;
1287 const char *constraint;
1288 bool allows_mem, allows_reg, is_inout;
1290 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1292 oconstraints[i] = constraint
1293 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1294 parse_output_constraint (&constraint, i, 0, 0,
1295 &allows_mem, &allows_reg, &is_inout);
1297 /* This should have been split in gimplify_asm_expr. */
1298 gcc_assert (!allows_reg || !is_inout);
1300 /* Memory operands are addressable. Note that STMT needs the
1301 address of this operand. */
1302 if (!allows_reg && allows_mem)
1304 tree t = get_base_address (TREE_VALUE (link));
1305 if (t && DECL_P (t) && s_ann)
1306 add_to_addressable_set (t, &s_ann->addresses_taken);
1309 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1312 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1314 constraint
1315 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1316 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1317 oconstraints, &allows_mem, &allows_reg);
1319 /* Memory operands are addressable. Note that STMT needs the
1320 address of this operand. */
1321 if (!allows_reg && allows_mem)
1323 tree t = get_base_address (TREE_VALUE (link));
1324 if (t && DECL_P (t) && s_ann)
1325 add_to_addressable_set (t, &s_ann->addresses_taken);
1328 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1332 /* Clobber memory for asm ("" : : : "memory"); */
1333 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1334 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1336 unsigned i;
1337 bitmap_iterator bi;
1339 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1340 decided to group them). */
1341 if (global_var)
1342 add_stmt_operand (&global_var, s_ann, opf_is_def);
1343 else
1344 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1346 tree var = referenced_var (i);
1347 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1350 /* Now clobber all addressables. */
1351 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
1353 tree var = referenced_var (i);
1355 /* Subvars are explicitly represented in this list, so
1356 we don't need the original to be added to the clobber
1357 ops, but the original *will* be in this list because
1358 we keep the addressability of the original
1359 variable up-to-date so we don't screw up the rest of
1360 the backend. */
1361 if (var_can_have_subvars (var)
1362 && get_subvars_for_var (var) != NULL)
1363 continue;
1365 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1368 break;
1372 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1373 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. */
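/* For example (illustrative): for a dereference '*p_1', if points-to
   analysis attached a name memory tag to 'p_1', the virtual operand is
   added for that tag; otherwise the type memory tag of the base variable
   'p' is used.  In either case the pointer 'p_1' itself is also added as a
   real USE.  */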
1375 static void
1376 get_indirect_ref_operands (tree stmt, tree expr, int flags)
1378 tree *pptr = &TREE_OPERAND (expr, 0);
1379 tree ptr = *pptr;
1380 stmt_ann_t s_ann = stmt_ann (stmt);
1382 /* Stores into INDIRECT_REF operands are never killing definitions. */
1383 flags &= ~opf_kill_def;
1385 if (SSA_VAR_P (ptr))
1387 struct ptr_info_def *pi = NULL;
1389 /* If PTR has flow-sensitive points-to information, use it. */
1390 if (TREE_CODE (ptr) == SSA_NAME
1391 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1392 && pi->name_mem_tag)
1394 /* PTR has its own memory tag. Use it. */
1395 add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
1397 else
1399 /* If PTR is not an SSA_NAME or it doesn't have a name
1400 tag, use its type memory tag. */
1401 var_ann_t v_ann;
1403 /* If we are emitting debugging dumps, display a warning if
1404 PTR is an SSA_NAME with no flow-sensitive alias
1405 information. That means that we may need to compute
1406 aliasing again. */
1407 if (dump_file
1408 && TREE_CODE (ptr) == SSA_NAME
1409 && pi == NULL)
1411 fprintf (dump_file,
1412 "NOTE: no flow-sensitive alias info for ");
1413 print_generic_expr (dump_file, ptr, dump_flags);
1414 fprintf (dump_file, " in ");
1415 print_generic_stmt (dump_file, stmt, dump_flags);
1418 if (TREE_CODE (ptr) == SSA_NAME)
1419 ptr = SSA_NAME_VAR (ptr);
1420 v_ann = var_ann (ptr);
1421 if (v_ann->type_mem_tag)
1422 add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
1426 /* If a constant is used as a pointer, we can't generate a real
1427 operand for it but we mark the statement volatile to prevent
1428 optimizations from messing things up. */
1429 else if (TREE_CODE (ptr) == INTEGER_CST)
1431 if (s_ann)
1432 s_ann->has_volatile_ops = true;
1433 return;
1436 /* Everything else *should* have been folded elsewhere, but users
1437 are smarter than we in finding ways to write invalid code. We
1438 cannot just assert here. If we were absolutely certain that we
1439 do handle all valid cases, then we could just do nothing here.
1440 That seems optimistic, so attempt to do something logical... */
1441 else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
1442 && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
1443 && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
1445 /* Make sure we know the object is addressable. */
1446 pptr = &TREE_OPERAND (ptr, 0);
1447 add_stmt_operand (pptr, s_ann, 0);
1449 /* Mark the object itself with a VUSE. */
1450 pptr = &TREE_OPERAND (*pptr, 0);
1451 get_expr_operands (stmt, pptr, flags);
1452 return;
1455 /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
1456 else
1457 gcc_unreachable ();
1459 /* Add a USE operand for the base pointer. */
1460 get_expr_operands (stmt, pptr, opf_none);
1463 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1465 static void
1466 get_tmr_operands (tree stmt, tree expr, int flags)
1468 tree tag = TMR_TAG (expr);
1470 /* First record the real operands. */
1471 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1472 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1474 /* MEM_REFs should never be killing. */
1475 flags &= ~opf_kill_def;
1477 if (TMR_SYMBOL (expr))
1479 stmt_ann_t ann = stmt_ann (stmt);
1480 add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1483 if (tag)
1484 get_expr_operands (stmt, &tag, flags);
1485 else
1486 /* Something weird, so ensure that we will be careful. */
1487 stmt_ann (stmt)->has_volatile_ops = true;
1490 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1492 static void
1493 get_call_expr_operands (tree stmt, tree expr)
1495 tree op;
1496 int call_flags = call_expr_flags (expr);
1498 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1499 operands for all the symbols that have been found to be
1500 call-clobbered.
1502 Note that if aliases have not been computed, the global effects
1503 of calls will not be included in the SSA web. This is fine
1504 because no optimizer should run before aliases have been
1505 computed. By not bothering with virtual operands for CALL_EXPRs
1506 we avoid adding superfluous virtual operands, which can be a
1507 significant compile time sink (See PR 15855). */
1508 if (aliases_computed_p
1509 && !bitmap_empty_p (call_clobbered_vars)
1510 && !(call_flags & ECF_NOVOPS))
1512 /* A 'pure' or a 'const' function never call-clobbers anything.
1513 A 'noreturn' function might, but since we don't return anyway
1514 there is no point in recording that. */
1515 if (TREE_SIDE_EFFECTS (expr)
1516 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1517 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1518 else if (!(call_flags & ECF_CONST))
1519 add_call_read_ops (stmt);
1522 /* Find uses in the called function. */
1523 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1525 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1526 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1528 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1533 /* Add *VAR_P to the appropriate operand array for INFO. FLAGS is as in
1534 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1535 the statement's real operands, otherwise it is added to virtual
1536 operands. */
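/* Illustrative examples (hypothetical names): in 'x_1 = y_2' both operands
   are GIMPLE registers and are appended to the real DEF and USE lists.  In
   'G = x_1', where 'G' is a global that is not a GIMPLE register, the LHS
   instead produces a virtual definition: a V_MUST_DEF if 'G' is unaliased,
   or V_MAY_DEFs of its aliases (and of 'G' itself when it is an alias tag)
   otherwise.  */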
1538 static void
1539 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1541 bool is_real_op;
1542 tree var, sym;
1543 var_ann_t v_ann;
1545 var = *var_p;
1546 STRIP_NOPS (var);
1548 /* If the operand is an ADDR_EXPR, add its operand to the list of
1549 variables that have had their address taken in this statement. */
1550 if (TREE_CODE (var) == ADDR_EXPR && s_ann)
1552 add_to_addressable_set (TREE_OPERAND (var, 0), &s_ann->addresses_taken);
1553 return;
1556 /* If the original variable is not a scalar, it will be added to the list
1557 of virtual operands. In that case, use its base symbol as the virtual
1558 variable representing it. */
1559 is_real_op = is_gimple_reg (var);
1560 if (!is_real_op && !DECL_P (var))
1561 var = get_virtual_var (var);
1563 /* If VAR is not a variable that we care to optimize, do nothing. */
1564 if (var == NULL_TREE || !SSA_VAR_P (var))
1565 return;
1567 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1568 v_ann = var_ann (sym);
1570 /* Mark statements with volatile operands. Optimizers should back
1571 off from statements having volatile operands. */
1572 if (TREE_THIS_VOLATILE (sym) && s_ann)
1573 s_ann->has_volatile_ops = true;
1575 /* If the variable cannot be modified and this is a V_MAY_DEF change
1576 it into a VUSE. This happens when read-only variables are marked
1577 call-clobbered and/or aliased to writable variables. So we only
1578 check that this only happens on non-specific stores.
1580 Note that if this is a specific store, i.e. associated with a
1581 modify_expr, then we can't suppress the V_DEF, lest we run into
1582 validation problems.
1584 This can happen when programs cast away const, leaving us with a
1585 store to read-only memory. If the statement is actually executed
1586 at runtime, then the program is ill formed. If the statement is
1587 not executed then all is well. At the very least, we cannot ICE. */
1588 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1590 gcc_assert (!is_real_op);
1591 flags &= ~(opf_is_def | opf_kill_def);
1594 if (is_real_op)
1596 /* The variable is a GIMPLE register. Add it to real operands. */
1597 if (flags & opf_is_def)
1598 append_def (var_p);
1599 else
1600 append_use (var_p);
1602 else
1604 varray_type aliases;
1606 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1607 virtual operands, unless the caller has specifically requested
1608 not to add virtual operands (used when adding operands inside an
1609 ADDR_EXPR expression). */
1610 if (flags & opf_no_vops)
1611 return;
1613 aliases = v_ann->may_aliases;
1615 if (aliases == NULL)
1617 /* The variable is not aliased or it is an alias tag. */
1618 if (flags & opf_is_def)
1620 if (flags & opf_kill_def)
1622 /* Only regular variables or struct fields may get a
1623 V_MUST_DEF operand. */
1624 gcc_assert (!MTAG_P (var)
1625 || TREE_CODE (var) == STRUCT_FIELD_TAG);
1626 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1627 variable definitions. */
1628 append_v_must_def (var);
1630 else
1632 /* Add a V_MAY_DEF for call-clobbered variables and
1633 memory tags. */
1634 append_v_may_def (var);
1637 else
1638 append_vuse (var);
1640 else
1642 size_t i;
1644 /* The variable is aliased. Add its aliases to the virtual
1645 operands. */
1646 gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);
1648 if (flags & opf_is_def)
1650 /* If the variable is also an alias tag, add a virtual
1651 operand for it, otherwise we will miss representing
1652 references to the members of the variable's alias set.
1653 This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
1654 if (v_ann->is_alias_tag)
1655 append_v_may_def (var);
1657 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1658 append_v_may_def (VARRAY_TREE (aliases, i));
1660 else
1662 /* Similarly, append a virtual use for VAR itself, when
1663 it is an alias tag. */
1664 if (v_ann->is_alias_tag)
1665 append_vuse (var);
1667 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1668 append_vuse (VARRAY_TREE (aliases, i));
1675 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
1676 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
1677 a single variable whose address has been taken or any other valid
1678 GIMPLE memory reference (structure reference, array, etc). If the
1679 base address of REF is a decl that has sub-variables, also add all
1680 of its sub-variables. */
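/* For example (illustrative): given '&s.f' where 's' is a structure with
   sub-variables for its fields, the base 's' is obtained with
   get_base_address and every sub-variable of 's' (not just the one for 'f')
   is added to the set and marked TREE_ADDRESSABLE, since the whole
   structure may later be accessed through pointer arithmetic.  */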
1682 void
1683 add_to_addressable_set (tree ref, bitmap *addresses_taken)
1685 tree var;
1686 subvar_t svars;
1688 gcc_assert (addresses_taken);
1690 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
1691 as the only thing we take the address of. If VAR is a structure,
1692 taking the address of a field means that the whole structure may
1693 be referenced using pointer arithmetic. See PR 21407 and the
1694 ensuing mailing list discussion. */
1695 var = get_base_address (ref);
1696 if (var && SSA_VAR_P (var))
1698 if (*addresses_taken == NULL)
1699 *addresses_taken = BITMAP_GGC_ALLOC ();
1701 if (var_can_have_subvars (var)
1702 && (svars = get_subvars_for_var (var)))
1704 subvar_t sv;
1705 for (sv = svars; sv; sv = sv->next)
1707 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
1708 TREE_ADDRESSABLE (sv->var) = 1;
1711 else
1713 bitmap_set_bit (*addresses_taken, DECL_UID (var));
1714 TREE_ADDRESSABLE (var) = 1;
1720 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1721 clobbered variables in the function. */
1723 static void
1724 add_call_clobber_ops (tree stmt, tree callee)
1726 unsigned u;
1727 tree t;
1728 bitmap_iterator bi;
1729 stmt_ann_t s_ann = stmt_ann (stmt);
1730 struct stmt_ann_d empty_ann;
1731 bitmap not_read_b, not_written_b;
1733 /* Functions that are not const, pure, or noreturn may clobber
1734 call-clobbered variables. */
1735 if (s_ann)
1736 s_ann->makes_clobbering_call = true;
1738 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1739 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1740 if (global_var)
1742 add_stmt_operand (&global_var, s_ann, opf_is_def);
1743 return;
1746 /* FIXME - if we have better information from the static vars
1747 analysis, we need to make the cache call site specific. This way
1748 we can have the performance benefits even if we are doing good
1749 optimization. */
1751 /* Get info for local and module level statics. There is a bit
1752 set for each static if the call being processed does not read
1753 or write that variable. */
1755 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1756 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1758 /* If cache is valid, copy the elements into the build vectors. */
1759 if (ssa_call_clobbered_cache_valid
1760 && (!not_read_b || bitmap_empty_p (not_read_b))
1761 && (!not_written_b || bitmap_empty_p (not_written_b)))
1763 for (u = 0 ; u < VEC_length (tree, clobbered_vuses); u++)
1765 t = VEC_index (tree, clobbered_vuses, u);
1766 gcc_assert (TREE_CODE (t) != SSA_NAME);
1767 var_ann (t)->in_vuse_list = 1;
1768 VEC_safe_push (tree, heap, build_vuses, (tree)t);
1770 for (u = 0; u < VEC_length (tree, clobbered_v_may_defs); u++)
1772 t = VEC_index (tree, clobbered_v_may_defs, u);
1773 gcc_assert (TREE_CODE (t) != SSA_NAME);
1774 var_ann (t)->in_v_may_def_list = 1;
1775 VEC_safe_push (tree, heap, build_v_may_defs, (tree)t);
1777 return;
1780 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
1782 /* Add a V_MAY_DEF operand for every call clobbered variable. */
1783 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1785 tree var = referenced_var (u);
1786 if (unmodifiable_var_p (var))
1787 add_stmt_operand (&var, &empty_ann, opf_none);
1788 else
1790 bool not_read
1791 = not_read_b ? bitmap_bit_p (not_read_b, u) : false;
1792 bool not_written
1793 = not_written_b ? bitmap_bit_p (not_written_b, u) : false;
1795 if (not_written)
1797 if (!not_read)
1798 add_stmt_operand (&var, &empty_ann, opf_none);
1800 else
1801 add_stmt_operand (&var, &empty_ann, opf_is_def);
1805 if ((!not_read_b || bitmap_empty_p (not_read_b))
1806 && (!not_written_b || bitmap_empty_p (not_written_b)))
1808 /* Prepare empty cache vectors. */
1809 VEC_truncate (tree, clobbered_vuses, 0);
1810 VEC_truncate (tree, clobbered_v_may_defs, 0);
1812 /* Now fill the clobbered cache with the values that have been found. */
1813 for (u = 0; u < VEC_length (tree, build_vuses); u++)
1814 VEC_safe_push (tree, heap, clobbered_vuses,
1815 VEC_index (tree, build_vuses, u));
1817 gcc_assert (VEC_length (tree, build_vuses)
1818 == VEC_length (tree, clobbered_vuses));
1820 for (u = 0; u < VEC_length (tree, build_v_may_defs); u++)
1821 VEC_safe_push (tree, heap, clobbered_v_may_defs,
1822 VEC_index (tree, build_v_may_defs, u));
1824 gcc_assert (VEC_length (tree, build_v_may_defs)
1825 == VEC_length (tree, clobbered_v_may_defs));
1827 ssa_call_clobbered_cache_valid = true;
1832 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1833 function. */
1835 static void
1836 add_call_read_ops (tree stmt)
1838 unsigned u;
1839 tree t;
1840 bitmap_iterator bi;
1841 stmt_ann_t s_ann = stmt_ann (stmt);
1842 struct stmt_ann_d empty_ann;
1844 /* If the function is not pure, it may reference memory. Add
1845 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1846 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1847 if (global_var)
1849 add_stmt_operand (&global_var, s_ann, opf_none);
1850 return;
1853 /* If cache is valid, copy the elements into the build vector. */
1854 if (ssa_ro_call_cache_valid)
1856 for (u = 0; u < VEC_length (tree, ro_call_vuses); u++)
1858 t = VEC_index (tree, ro_call_vuses, u);
1859 gcc_assert (TREE_CODE (t) != SSA_NAME);
1860 var_ann (t)->in_vuse_list = 1;
1861 VEC_safe_push (tree, heap, build_vuses, (tree)t);
1863 return;
1866 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
1868 /* Add a VUSE for each call-clobbered variable. */
1869 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1871 tree var = referenced_var (u);
1872 add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
1875 /* Prepare empty cache vectors. */
1876 VEC_truncate (tree, ro_call_vuses, 0);
1878 /* Now fill the read-only call cache with the values that have been found. */
1879 for (u = 0; u < VEC_length (tree, build_vuses); u++)
1880 VEC_safe_push (tree, heap, ro_call_vuses,
1881 VEC_index (tree, build_vuses, u));
1883 gcc_assert (VEC_length (tree, build_vuses)
1884 == VEC_length (tree, ro_call_vuses));
1886 ssa_ro_call_cache_valid = true;
1890 /* Scan the immediate_use list for VAR, making sure it is linked properly.
1891 Return TRUE if there is a problem. */
1893 bool
1894 verify_imm_links (FILE *f, tree var)
1896 use_operand_p ptr, prev, list;
1897 int count;
1899 gcc_assert (TREE_CODE (var) == SSA_NAME);
1901 list = &(SSA_NAME_IMM_USE_NODE (var));
1902 gcc_assert (list->use == NULL);
1904 if (list->prev == NULL)
1906 gcc_assert (list->next == NULL);
1907 return false;
1910 prev = list;
1911 count = 0;
1912 for (ptr = list->next; ptr != list; )
1914 if (prev != ptr->prev)
1915 goto error;
1917 if (ptr->use == NULL)
1918 goto error; /* 2 roots, or SAFE guard node. */
1919 else if (*(ptr->use) != var)
1920 goto error;
1922 prev = ptr;
1923 ptr = ptr->next;
1924 /* Avoid infinite loops. 50,000,000 uses probably indicates a problem. */
1925 if (count++ > 50000000)
1926 goto error;
1929 /* Verify list in the other direction. */
1930 prev = list;
1931 for (ptr = list->prev; ptr != list; )
1933 if (prev != ptr->next)
1934 goto error;
1935 prev = ptr;
1936 ptr = ptr->prev;
1937 if (count-- < 0)
1938 goto error;
1941 if (count != 0)
1942 goto error;
1944 return false;
1946 error:
1947 if (ptr->stmt && stmt_modified_p (ptr->stmt))
1949 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
1950 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
1952 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
1953 (void *)ptr->use);
1954 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
1955 fprintf(f, "\n");
1956 return true;
1960 /* Dump all the immediate uses for VAR to FILE. */
1962 void
1963 dump_immediate_uses_for (FILE *file, tree var)
1965 imm_use_iterator iter;
1966 use_operand_p use_p;
1968 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
1970 print_generic_expr (file, var, TDF_SLIM);
1971 fprintf (file, " : -->");
1972 if (has_zero_uses (var))
1973 fprintf (file, " no uses.\n");
1974 else
1975 if (has_single_use (var))
1976 fprintf (file, " single use.\n");
1977 else
1978 fprintf (file, "%d uses.\n", num_imm_uses (var));
1980 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
1982 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
1983 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
1984 else
1985 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
1987 fprintf(file, "\n");
1990 /* Dump all the immediate uses to FILE. */
1992 void
1993 dump_immediate_uses (FILE *file)
1995 tree var;
1996 unsigned int x;
1998 fprintf (file, "Immediate_uses: \n\n");
1999 for (x = 1; x < num_ssa_names; x++)
2001 var = ssa_name(x);
2002 if (!var)
2003 continue;
2004 dump_immediate_uses_for (file, var);
2009 /* Dump def-use edges on stderr. */
2011 void
2012 debug_immediate_uses (void)
2014 dump_immediate_uses (stderr);
2017 /* Dump def-use edges for VAR on stderr. */
2019 void
2020 debug_immediate_uses_for (tree var)
2022 dump_immediate_uses_for (stderr, var);
2024 #include "gt-tree-ssa-operands.h"