/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
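
/* As a hypothetical usage sketch (an illustration of what the cache
   enables, not part of this file's API surface), a pass that wants to
   walk every operand of a statement after the cache is built might do:

     tree stmt = ...;            (some GIMPLE statement)
     ssa_op_iter iter;
     tree op;

     update_stmt (stmt);         (ensure the operand cache is current)
     FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_OPERANDS)
       print_generic_expr (stderr, op, 0);

   FOR_EACH_SSA_TREE_OPERAND and SSA_OP_ALL_OPERANDS come from
   tree-ssa-operands.h via tree-flow.h in this same tree.  */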
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def 	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def 	(1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 2)

/* Operand is a "non-specific" kill for call-clobbers and such.  This is used
   to distinguish "reset the world" events from explicit MODIFY_EXPRs.  */
#define opf_non_specific  (1 << 3)
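
/* An illustrative sketch of how these flags combine, mirroring the way
   parse_ssa_operands below uses them (hypothetical calls):

     (RHS of an assignment: plain loads.)
     get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);

     (LHS that overwrites the whole object: a killing definition.)
     get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
			opf_is_def | opf_kill_def);

     (LHS that may only partially define the object, e.g. a
      BIT_FIELD_REF: a may-def only.)
     get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_is_def);  */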
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Array for building all the v_may_def operands.  */
static VEC(tree,heap) *build_v_may_defs;

/* Array for building all the vuse operands.  */
static VEC(tree,heap) *build_vuses;

/* Array for building all the v_must_def operands.  */
static VEC(tree,heap) *build_v_must_defs;

/* True if the operands for call clobbered vars are cached and valid.  */
bool ssa_call_clobbered_cache_valid;
bool ssa_ro_call_cache_valid;

/* These arrays are the cached operand vectors for call clobbered calls.  */
static VEC(tree,heap) *clobbered_v_may_defs;
static VEC(tree,heap) *clobbered_vuses;
static VEC(tree,heap) *ro_call_vuses;
static bool ops_active = false;

static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
static unsigned operand_memory_index;
static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
static void get_tmr_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
static void append_v_may_def (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree, tree);
static void add_call_read_ops (tree);
static void add_stmt_operand (tree *, stmt_ann_t, int);
static void build_ssa_operands (tree stmt);

static def_optype_p free_defs = NULL;
static use_optype_p free_uses = NULL;
static vuse_optype_p free_vuses = NULL;
static maydef_optype_p free_maydefs = NULL;
static mustdef_optype_p free_mustdefs = NULL;
/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}
/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *) p);
  tree e2 = *((const tree *) q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}
/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static inline void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);
  if (num < 2)
    return;
  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
	  > get_name_decl (VEC_index (tree, list, 1)))
	{
	  /* Swap elements if in the wrong order.  */
	  tree tmp = VEC_index (tree, list, 0);
	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
	  VEC_replace (tree, list, 1, tmp);
	}
      return;
    }
  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
	 VEC_length (tree, list),
	 sizeof (tree),
	 operand_build_cmp);
}
/* Return true if the ssa operands cache is active.  */

bool
ssa_operands_active (void)
{
  return ops_active;
}
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  build_defs = VEC_alloc (tree, heap, 5);
  build_uses = VEC_alloc (tree, heap, 10);
  build_vuses = VEC_alloc (tree, heap, 25);
  build_v_may_defs = VEC_alloc (tree, heap, 25);
  build_v_must_defs = VEC_alloc (tree, heap, 25);

  gcc_assert (operand_memory == NULL);
  operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
  ops_active = true;
}
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  VEC_free (tree, heap, build_defs);
  VEC_free (tree, heap, build_uses);
  VEC_free (tree, heap, build_v_must_defs);
  VEC_free (tree, heap, build_v_may_defs);
  VEC_free (tree, heap, build_vuses);
  free_defs = NULL;
  free_uses = NULL;
  free_vuses = NULL;
  free_maydefs = NULL;
  free_mustdefs = NULL;
  while ((ptr = operand_memory) != NULL)
    {
      operand_memory = operand_memory->next;
      ggc_free (ptr);
    }

  VEC_free (tree, heap, clobbered_v_may_defs);
  VEC_free (tree, heap, clobbered_vuses);
  VEC_free (tree, heap, ro_call_vuses);
  ops_active = false;
}
/* Return memory for operands of SIZE chunks.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;
  if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
    {
      struct ssa_operand_memory_d *ptr;
      ptr = GGC_NEW (struct ssa_operand_memory_d);
      ptr->next = operand_memory;
      operand_memory = ptr;
      operand_memory_index = 0;
    }
  ptr = &(operand_memory->mem[operand_memory_index]);
  operand_memory_index += size;
  return ptr;
}
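
/* This is a simple bump-pointer allocator over GC-owned chunks.  A
   typical allocation through it looks like this sketch (hypothetical
   caller; the real callers are the alloc_* routines generated from
   tree-ssa-opfinalize.h below):

     struct maydef_optype_d *node
       = ssa_operand_alloc (sizeof (struct maydef_optype_d));

   Individual nodes are never freed back to the chunk; retired nodes are
   instead threaded onto the free_* lists above for reuse.  */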
/* Make sure PTR is in the correct immediate use list.  Since uses are simply
   pointers into the stmt TREE, there is no way of telling if anyone has
   changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
   The contents are different, but the pointer is still the same.  This
   routine will check to make sure PTR is in the correct list, and if it isn't
   put it in the correct list.  We cannot simply check the previous node
   because all nodes in the same stmt might have been changed.  */

static inline void
correct_use_link (use_operand_p ptr, tree stmt)
{
  use_operand_p prev;
  tree root;

  /* fold_stmt () may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  prev = ptr->prev;
  if (prev)
    {
      /* Find the root element, making sure we skip any safe iterators.  */
      while (prev->use != NULL || prev->stmt == NULL)
	prev = prev->prev;

      /* Get the ssa_name of the list the node is in.  */
      root = prev->stmt;
      /* If it's the right list, simply return.  */
      if (root == *(ptr->use))
	return;
    }
  /* It is in the wrong list if we reach here.  */
  delink_imm_use (ptr);
  link_imm_use (ptr, *(ptr->use));
}
/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  Virtual uses do not need
   the overhead of correct_use_link since they cannot be directly manipulated
   like a real use can be.  (They don't exist in the TREE_OPERAND nodes.)  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* fold_stmt () may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}
#define FINALIZE_OPBUILD		build_defs
#define FINALIZE_OPBUILD_BASE(I)	(tree *)VEC_index (tree,	\
							   build_defs, (I))
#define FINALIZE_OPBUILD_ELEM(I)	(tree *)VEC_index (tree,	\
							   build_defs, (I))
#define FINALIZE_FUNC			finalize_ssa_def_ops
#define FINALIZE_ALLOC			alloc_def
#define FINALIZE_FREE			free_defs
#define FINALIZE_TYPE			struct def_optype_d
#define FINALIZE_ELEM(PTR)		((PTR)->def_ptr)
#define FINALIZE_OPS			DEF_OPS
#define FINALIZE_BASE(VAR)		VAR
#define FINALIZE_BASE_TYPE		tree *
#define FINALIZE_BASE_ZERO		NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)	FINALIZE_ELEM (PTR) = (VAL)
#include "tree-ssa-opfinalize.h"
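
/* The include just above instantiates a generic finalizer from
   tree-ssa-opfinalize.h; each FINALIZE_* macro parameterizes one aspect
   of it (build vector, allocator, free list, element accessor).  As a
   rough sketch of the pattern (simplified and hypothetical, not the
   real template body):

     static void
     FINALIZE_FUNC (tree stmt)
     {
       unsigned i;
       for (i = 0; i < VEC_length (tree, FINALIZE_OPBUILD); i++)
	 (allocate a FINALIZE_TYPE node, taking one from FINALIZE_FREE
	  if available and calling ssa_operand_alloc otherwise, store
	  the element with FINALIZE_INITIALIZE, and chain the node onto
	  FINALIZE_OPS (stmt));
     }

   The same include is repeated below with different macro settings to
   generate the use, v_may_def, vuse and v_must_def finalizers.  */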
/* This routine will create stmt operands for STMT from the def build list.  */

static void
finalize_ssa_defs (tree stmt)
{
  unsigned int num = VEC_length (tree, build_defs);
  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);

  /* If there is an old list, often the new list is identical, or close, so
     find the elements at the beginning that are the same as the vector.  */

  finalize_ssa_def_ops (stmt);
  VEC_truncate (tree, build_defs, 0);
}
#define FINALIZE_OPBUILD		build_uses
#define FINALIZE_OPBUILD_BASE(I)	(tree *)VEC_index (tree,	\
							   build_uses, (I))
#define FINALIZE_OPBUILD_ELEM(I)	(tree *)VEC_index (tree,	\
							   build_uses, (I))
#define FINALIZE_FUNC			finalize_ssa_use_ops
#define FINALIZE_ALLOC			alloc_use
#define FINALIZE_FREE			free_uses
#define FINALIZE_TYPE			struct use_optype_d
#define FINALIZE_ELEM(PTR)		((PTR)->use_ptr.use)
#define FINALIZE_OPS			USE_OPS
#define FINALIZE_USE_PTR(PTR)		USE_OP_PTR (PTR)
#define FINALIZE_CORRECT_USE		correct_use_link
#define FINALIZE_BASE(VAR)		VAR
#define FINALIZE_BASE_TYPE		tree *
#define FINALIZE_BASE_ZERO		NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)				\
				(PTR)->use_ptr.use = (VAL);		\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   *(VAL), (STMT))
#include "tree-ssa-opfinalize.h"
/* Finalize the new use operand vector for STMT.  */

static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to get_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
  }
#endif
  finalize_ssa_use_ops (stmt);
  VEC_truncate (tree, build_uses, 0);
}
/* Finalize the new v_may_def operand vector for STMT.  */
#define FINALIZE_OPBUILD		build_v_may_defs
#define FINALIZE_OPBUILD_ELEM(I)	VEC_index (tree, build_v_may_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)	get_name_decl (VEC_index (tree,	\
							build_v_may_defs, (I)))
#define FINALIZE_FUNC			finalize_ssa_v_may_def_ops
#define FINALIZE_ALLOC			alloc_maydef
#define FINALIZE_FREE			free_maydefs
#define FINALIZE_TYPE			struct maydef_optype_d
#define FINALIZE_ELEM(PTR)		MAYDEF_RESULT (PTR)
#define FINALIZE_OPS			MAYDEF_OPS
#define FINALIZE_USE_PTR(PTR)		MAYDEF_OP_PTR (PTR)
#define FINALIZE_CORRECT_USE		set_virtual_use_link
#define FINALIZE_BASE_ZERO		0
#define FINALIZE_BASE(VAR)		get_name_decl (VAR)
#define FINALIZE_BASE_TYPE		unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)				\
				(PTR)->def_var = (VAL);			\
				(PTR)->use_var = (VAL);			\
				(PTR)->use_ptr.use = &((PTR)->use_var);	\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"
static void
finalize_ssa_v_may_defs (tree stmt)
{
  finalize_ssa_v_may_def_ops (stmt);
}


/* Clear the in_list bits and empty the build array for v_may_defs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = VEC_length (tree, build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }
  VEC_truncate (tree, build_v_may_defs, 0);
}
#define FINALIZE_OPBUILD		build_vuses
#define FINALIZE_OPBUILD_ELEM(I)	VEC_index (tree, build_vuses, (I))
#define FINALIZE_OPBUILD_BASE(I)	get_name_decl (VEC_index (tree,	\
							build_vuses, (I)))
#define FINALIZE_FUNC			finalize_ssa_vuse_ops
#define FINALIZE_ALLOC			alloc_vuse
#define FINALIZE_FREE			free_vuses
#define FINALIZE_TYPE			struct vuse_optype_d
#define FINALIZE_ELEM(PTR)		VUSE_OP (PTR)
#define FINALIZE_OPS			VUSE_OPS
#define FINALIZE_USE_PTR(PTR)		VUSE_OP_PTR (PTR)
#define FINALIZE_CORRECT_USE		set_virtual_use_link
#define FINALIZE_BASE_ZERO		0
#define FINALIZE_BASE(VAR)		get_name_decl (VAR)
#define FINALIZE_BASE_TYPE		unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)				\
				(PTR)->use_var = (VAL);			\
				(PTR)->use_ptr.use = &((PTR)->use_var);	\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"
/* Finalize the new vuse operand vector for STMT.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_v_may_defs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  num = VEC_length (tree, build_vuses);
  num_v_may_defs = VEC_length (tree, build_v_may_defs);

  if (num > 0 && num_v_may_defs > 0)
    {
      for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
	{
	  tree vuse;
	  vuse = VEC_index (tree, build_vuses, vuse_index);
	  if (TREE_CODE (vuse) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (vuse);
	      ann->in_vuse_list = 0;
	      if (ann->in_v_may_def_list)
		{
		  VEC_ordered_remove (tree, build_vuses, vuse_index);
		  continue;
		}
	    }
	  vuse_index++;
	}
    }
  else
    /* Clear out the in_list bits.  */
    for (vuse_index = 0;
	 vuse_index < VEC_length (tree, build_vuses);
	 vuse_index++)
      {
	tree t = VEC_index (tree, build_vuses, vuse_index);
	if (TREE_CODE (t) != SSA_NAME)
	  {
	    var_ann_t ann = var_ann (t);
	    ann->in_vuse_list = 0;
	  }
      }

  finalize_ssa_vuse_ops (stmt);
  /* The v_may_def build vector wasn't cleaned up because we needed it.  */
  cleanup_v_may_defs ();

  /* Free the vuses build vector.  */
  VEC_truncate (tree, build_vuses, 0);
}
/* Finalize the new v_must_def operand vector for STMT.  */

#define FINALIZE_OPBUILD		build_v_must_defs
#define FINALIZE_OPBUILD_ELEM(I)	VEC_index (tree, build_v_must_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)	get_name_decl (VEC_index (tree,	\
							build_v_must_defs, (I)))
#define FINALIZE_FUNC			finalize_ssa_v_must_def_ops
#define FINALIZE_ALLOC			alloc_mustdef
#define FINALIZE_FREE			free_mustdefs
#define FINALIZE_TYPE			struct mustdef_optype_d
#define FINALIZE_ELEM(PTR)		MUSTDEF_RESULT (PTR)
#define FINALIZE_OPS			MUSTDEF_OPS
#define FINALIZE_USE_PTR(PTR)		MUSTDEF_KILL_PTR (PTR)
#define FINALIZE_CORRECT_USE		set_virtual_use_link
#define FINALIZE_BASE_ZERO		0
#define FINALIZE_BASE(VAR)		get_name_decl (VAR)
#define FINALIZE_BASE_TYPE		unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)				\
				(PTR)->def_var = (VAL);			\
				(PTR)->kill_var = (VAL);		\
				(PTR)->use_ptr.use = &((PTR)->kill_var);\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"
static void
finalize_ssa_v_must_defs (tree stmt)
{
  /* In the presence of subvars, there may be more than one V_MUST_DEF per
     statement (one for each subvar).  It is a bit expensive to verify that
     all must-defs in a statement belong to subvars if there is more than one
     must-def, so we don't do it.  Suffice to say, if you reach here without
     having subvars, and have num > 1, you have hit a bug.  */

  finalize_ssa_v_must_def_ops (stmt);
  VEC_truncate (tree, build_v_must_defs, 0);
}
/* Finalize all the build vectors and attach the results to STMT.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);
}


/* Start the process of building up the operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
  gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}
/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      /* Don't allow duplicate entries.  */
      if (ann->in_v_may_def_list)
	return;
      ann->in_v_may_def_list = 1;
    }

  VEC_safe_push (tree, heap, build_v_may_defs, (tree) var);
}
/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
	return;
      ann->in_vuse_list = 1;
    }

  VEC_safe_push (tree, heap, build_vuses, (tree) var);
}
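
/* The in_vuse_list / in_v_may_def_list annotation bits give O(1)
   duplicate detection here.  For example (an illustrative trace):
   scanning 'a = a + 1' for an aliased 'a' first calls
   append_v_may_def (a) and later append_vuse (a); the second call sees
   ann->in_v_may_def_list set and drops the redundant VUSE immediately,
   mirroring the pruning done later in finalize_ssa_vuses.  */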
/* Add VAR to the build array of virtual must definitions.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
    if (var == VEC_index (tree, build_v_must_defs, i))
      return;

  VEC_safe_push (tree, heap, build_v_must_defs, (tree) var);
}
/* Parse STMT looking for operands.  When finished, the various build_*
   operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      /* First get operands from the RHS.  For the LHS, we use a V_MAY_DEF if
	 either only part of LHS is modified or if the RHS might throw,
	 otherwise, use V_MUST_DEF.

	 ??? If it might throw, we should represent somehow that it is killed
	 on the fallthrough path.  */
      {
	tree lhs = TREE_OPERAND (stmt, 0);
	int lhs_flags = opf_is_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);

	/* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
	   or not the entire LHS is modified; that depends on what's
	   inside the VIEW_CONVERT_EXPR.  */
	if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
	  lhs = TREE_OPERAND (lhs, 0);

	if (TREE_CODE (lhs) != ARRAY_RANGE_REF
	    && TREE_CODE (lhs) != BIT_FIELD_REF)
	  lhs_flags |= opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
      }
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

      /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
	 pointer (which may only happen for USE operands), we will fail in
	 append_use.  This default will handle statements like empty
	 statements, or CALL_EXPRs that may appear on the RHS of a statement
	 or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }
}
/* Create an operands cache for STMT.  */

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands.  */
  if (ann)
    ann->has_volatile_ops = false;

  start_ssa_stmt_operands ();

  parse_ssa_operands (stmt);
  operand_build_sort_virtual (build_vuses);
  operand_build_sort_virtual (build_v_may_defs);
  operand_build_sort_virtual (build_v_must_defs);

  finalize_ssa_stmt_operands (stmt);
}
/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->maydef_ops = NULL;
  ops->mustdef_ops = NULL;
  ops->vuse_ops = NULL;
}
/* Get the operands of statement STMT.  Note that repeated calls to
   update_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to mark_stmt_modified ().  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);
  /* If update_stmt_operands is called before SSA is initialized, don't
     do anything.  */
  if (!ssa_operands_active ())
    return;
  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  gcc_assert (ann->modified);

  timevar_push (TV_TREE_OPS);

  build_ssa_operands (stmt);

  /* Clear the modified bit for STMT.  Subsequent calls to
     update_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to mark_stmt_modified ().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
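
/* A hypothetical caller's view of the protocol above (a sketch only):
   a pass that rewrites an operand in place must mark the statement
   modified before the cache will be rebuilt:

     TREE_OPERAND (stmt, 1) = new_rhs;   (mutate the stmt tree)
     mark_stmt_modified (stmt);          (flag the cache as stale)
     update_stmt_operands (stmt);        (rebuild the operand cache)

   In practice passes usually go through the update_stmt () wrapper in
   tree-flow-inline.h, which performs the modified check itself.  */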
/* Copies virtual operands from SRC to DEST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  tree t;
  ssa_op_iter iter, old_iter;
  use_operand_p use_p, u2;
  def_operand_p def_p, d2;

  build_ssa_operands (dest);

  /* Copy all the virtual fields.  */
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
    append_vuse (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
    append_v_may_def (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
    append_v_must_def (t);

  if (VEC_length (tree, build_vuses) == 0
      && VEC_length (tree, build_v_may_defs) == 0
      && VEC_length (tree, build_v_must_defs) == 0)
    return;

  /* Now commit the virtual operands to this stmt.  */
  finalize_ssa_v_must_defs (dest);
  finalize_ssa_v_may_defs (dest);
  finalize_ssa_vuses (dest);

  /* Finally, set the fields to the same values as the originals.  */
  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, t);
      t = op_iter_next_tree (&old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_maydef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));
}
/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this can
   be used to eliminate redundant loads.  OLD_STMT is the store stmt, and
   NEW_STMT is the new load which represents a load of the values stored.  */

void
create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
{
  stmt_ann_t ann;
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned x;

  ann = get_stmt_ann (new_stmt);

  /* Process the stmt looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (x = 0; x < VEC_length (tree, build_vuses); x++)
    {
      tree t = VEC_index (tree, build_vuses, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_vuse_list = 0;
	}
    }

  for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }

  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_v_may_defs, 0);
  VEC_truncate (tree, build_v_must_defs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
			     (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
    append_vuse (op);

  /* Now build the operands for this new stmt.  */
  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}
/* Swap operands EXP0 and EXP1 in statement STMT, keeping the immediate
   use links consistent.  */

void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative positions
     of these two operands in their respective immediate use lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;
      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }
      /* If both uses don't have operand entries, there isn't much we can do
	 at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
	{
	  tree *tmp = USE_OP_PTR (use1)->use;
	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
	  USE_OP_PTR (use0)->use = tmp;
	}
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
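
/* Hypothetical use (a sketch): canonicalizing the operand order of a
   commutative RHS without invalidating the immediate use lists:

     tree rhs = TREE_OPERAND (stmt, 1);
     if (tree_swap_operands_p (TREE_OPERAND (rhs, 0),
			       TREE_OPERAND (rhs, 1), false))
       swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
			   &TREE_OPERAND (rhs, 1));

   Swapping through this routine, rather than assigning through the
   tree directly, keeps the cached use pointers valid so no operand
   rescan is required afterwards.  */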
/* Recursively scan the expression pointed to by EXPR_P in statement STMT.
   FLAGS is one of the OPF_* constants modifying how to interpret the
   operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the stmt takes its address will be
	 of interest to some passes (e.g. alias resolution).  */
      add_to_addressable_set (TREE_OPERAND (expr, 0),
			      &s_ann->addresses_taken);

      /* If the address is invariant, there may be no interesting variable
	 references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* There should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find here
	 are ARRAY_REF indices which will always be real operands (GIMPLE
	 does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case STRUCT_FIELD_TAG:
    case TYPE_MEMORY_TAG:
    case NAME_MEMORY_TAG:
      add_stmt_operand (expr_p, s_ann, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      {
	subvar_t svars;

	/* Add the subvars for a variable if it has subvars, to DEFS or USES.
	   Otherwise, add the variable itself.
	   Whether it goes to USES or DEFS depends on the operand flags.  */
	if (var_can_have_subvars (expr)
	    && (svars = get_subvars_for_var (expr)))
	  {
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      add_stmt_operand (&sv->var, s_ann, flags);
	  }
	else
	  add_stmt_operand (expr_p, s_ann, flags);
	return;
      }

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
	 representing the array.  The virtual variable for an ARRAY_REF
	 is the VAR_DECL for the array.  */

      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
	 according to the value of IS_DEF.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
      return;

    case ARRAY_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree ref;
	HOST_WIDE_INT offset, size, maxsize;
	bool none = true;
	/* This component ref becomes an access to all of the subvariables
	   it can touch, if we can determine that, but *NOT* the real one.
	   If we can't determine which fields we could touch, the recursion
	   will eventually get to a variable and add *all* of its subvars, or
	   whatever is the minimum correct subset.  */

	ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
	if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
	  {
	    subvar_t svars = get_subvars_for_var (ref);
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      {
		bool exact;
		if (overlap_subvar (offset, maxsize, sv, &exact))
		  {
		    int subvar_flags = flags;
		    none = false;
		    if (!exact
			|| size != maxsize)
		      subvar_flags &= ~opf_kill_def;
		    add_stmt_operand (&sv->var, s_ann, subvar_flags);
		  }
	      }
	    if (!none)
	      flags |= opf_no_vops;
	  }

	/* Even if we found subvars above we need to ensure to see
	   immediate uses for d in s.a[d].  In case of s.a having
	   a subvar we'd miss it otherwise.  */
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			   flags & ~opf_kill_def);

	if (code == COMPONENT_REF)
	  {
	    if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      s_ann->has_volatile_ops = true;
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	  }
	else if (code == ARRAY_REF)
	  {
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
	  }
	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case MODIFY_EXPR:
      {
	int subflags;
	tree op;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

	/* Look through a WITH_SIZE_EXPR wrapper to find the real LHS.  */
	op = TREE_OPERAND (expr, 0);
	if (TREE_CODE (op) == WITH_SIZE_EXPR)
	  op = TREE_OPERAND (op, 0);
	if (TREE_CODE (op) == ARRAY_RANGE_REF
	    || TREE_CODE (op) == REALPART_EXPR
	    || TREE_CODE (op) == IMAGPART_EXPR)
	  subflags = opf_is_def;
	else
	  subflags = opf_is_def | opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
	return;
      }

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	for (idx = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, opf_none);

	return;
      }

    case TRUTH_NOT_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
    case CONST_DECL:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
	goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
	goto do_binary;
      if (class == tcc_constant || class == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  internal_error ("internal error");
#endif
  gcc_unreachable ();
}
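
/* For instance (an illustrative walk-through, not extra functionality):
   scanning the statement 'p = &a.b[i]' enters the ADDR_EXPR case above,
   records the base 'a' in addresses_taken, sets opf_no_vops, and then
   recurses into the ARRAY_REF, where only the index 'i' survives as a
   real USE; no VUSE is created for 'a' or 'b', exactly as described in
   the comment for opf_no_vops.  */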
/* Scan operands in the ASM_EXPR stmt STMT.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
			       &allows_mem, &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, s_ann, opf_is_def);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	    }

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
	  {
	    tree var = referenced_var (i);

	    /* Subvars are explicitly represented in this list, so
	       we don't need the original to be added to the clobber
	       ops, but the original *will* be in this list because
	       we keep the addressability of the original
	       variable up-to-date so we don't screw up the rest of
	       the backend.  */
	    if (var_can_have_subvars (var)
		&& get_subvars_for_var (var) != NULL)
	      continue;

	    add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	  }

	break;
      }
}
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its type memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);
	  if (v_ann->type_mem_tag)
	    add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
	}
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }
  /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
  else
    gcc_unreachable ();

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none);
}
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag = TMR_TAG (expr), ref;
  HOST_WIDE_INT offset, size, maxsize;
  subvar_t svars, sv;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);

  /* MEM_REFs should never be killing.  */
  flags &= ~opf_kill_def;

  if (TMR_SYMBOL (expr))
    {
      stmt_ann_t ann = stmt_ann (stmt);
      add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
    }

  if (!tag)
    {
      /* Something weird, so ensure that we will be careful.  */
      stmt_ann (stmt)->has_volatile_ops = true;
      return;
    }

  if (DECL_P (tag))
    {
      get_expr_operands (stmt, &tag, flags);
      return;
    }

  ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
  gcc_assert (ref != NULL_TREE);
  svars = get_subvars_for_var (ref);
  for (sv = svars; sv; sv = sv->next)
    {
      bool exact;
      if (overlap_subvar (offset, maxsize, sv, &exact))
	{
	  int subvar_flags = flags;
	  if (!exact || size != maxsize)
	    subvar_flags &= ~opf_kill_def;
	  add_stmt_operand (&sv->var, s_ann, subvar_flags);
	}
    }
}
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or V_USE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
  if (aliases_computed_p
      && !bitmap_empty_p (call_clobbered_vars)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt);
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}
/* Add *VAR_P to the appropriate operand array for the statement whose
   annotation is S_ANN.  FLAGS is as in get_expr_operands.  If *VAR_P is a
   GIMPLE register, it will be added to the statement's real operands,
   otherwise it is added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  bool is_real_op;
  tree var, sym;
  var_ann_t v_ann;

  var = *var_p;
  gcc_assert (SSA_VAR_P (var));

  is_real_op = is_gimple_reg (var);
  /* If this is a real operand, the operand is either an SSA name or a
     decl.  Virtual operands may only be decls.  */
  gcc_assert (is_real_op || DECL_P (var));

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a V_MAY_DEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     check that this only happens on non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     modify_expr, then we can't suppress the V_DEF, lest we run into
     validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_non_specific) && unmodifiable_var_p (var))
    {
      gcc_assert (!is_real_op);
      flags &= ~(opf_is_def | opf_kill_def);
    }

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    {
      VEC(tree,gc) *aliases;

      /* The variable is not a GIMPLE register.  Add it (or its aliases) to
	 virtual operands, unless the caller has specifically requested
	 not to add virtual operands (used when adding operands inside an
	 ADDR_EXPR expression).  */
      if (flags & opf_no_vops)
	return;

      aliases = v_ann->may_aliases;

      if (aliases == NULL)
	{
	  /* The variable is not aliased or it is an alias tag.  */
	  if (flags & opf_is_def)
	    {
	      if (flags & opf_kill_def)
		{
		  /* Only regular variables or struct fields may get a
		     V_MUST_DEF operand.  */
		  gcc_assert (!MTAG_P (var)
			      || TREE_CODE (var) == STRUCT_FIELD_TAG);
		  /* V_MUST_DEF for non-aliased, non-GIMPLE register
		     variable definitions.  */
		  append_v_must_def (var);
		}
	      else
		{
		  /* Add a V_MAY_DEF for call-clobbered variables and
		     memory tags.  */
		  append_v_may_def (var);
		}
	    }
	  else
	    append_vuse (var);
	}
      else
	{
	  unsigned i;
	  tree al;

	  /* The variable is aliased.  Add its aliases to the virtual
	     operands.  */
	  gcc_assert (VEC_length (tree, aliases) != 0);

	  if (flags & opf_is_def)
	    {
	      /* If the variable is also an alias tag, add a virtual
		 operand for it, otherwise we will miss representing
		 references to the members of the variable's alias set.
		 This fixes the bug in gcc.c-torture/execute/20020503-1.c.  */
	      if (v_ann->is_alias_tag)
		append_v_may_def (var);

	      for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
		append_v_may_def (al);
	    }
	  else
	    {
	      /* Similarly, append a virtual use for VAR itself, when
		 it is an alias tag.  */
	      if (v_ann->is_alias_tag)
		append_vuse (var);

	      for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
		append_vuse (al);
	    }
	}
    }
}
/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
   a single variable whose address has been taken or any other valid
   GIMPLE memory reference (structure reference, array, etc).  If the
   base address of REF is a decl that has sub-variables, also add all
   of its sub-variables.  */

void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
  tree var;
  subvar_t svars;

  gcc_assert (addresses_taken);

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*addresses_taken == NULL)
	*addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
	  && (svars = get_subvars_for_var (var)))
	{
	  subvar_t sv;
	  for (sv = svars; sv; sv = sv->next)
	    {
	      bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
	      TREE_ADDRESSABLE (sv->var) = 1;
	    }
	}
      else
	{
	  bitmap_set_bit (*addresses_taken, DECL_UID (var));
	  TREE_ADDRESSABLE (var) = 1;
	}
    }
}
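
/* For example (a sketch of the interaction, no new behavior): for the
   statement 'p = &s.f', the ADDR_EXPR case in get_expr_operands calls
   add_to_addressable_set on 's.f'; get_base_address extracts 's', so
   the whole of 's' (or each of its sub-variables, if it has them) is
   recorded and marked TREE_ADDRESSABLE, per the PR 21407 discussion
   above.  */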
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;
  bitmap not_read_b, not_written_b;

  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_is_def);
      return;
    }

  /* FIXME - if we have better information from the static vars
     analysis, we need to make the cache call site specific.  This way
     we can have the performance benefits even if we are doing good
     optimization.  */

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;

  /* If the cache is valid, copy the elements into the build vectors.  */
  if (ssa_call_clobbered_cache_valid
      && (!not_read_b || bitmap_empty_p (not_read_b))
      && (!not_written_b || bitmap_empty_p (not_written_b)))
    {
      for (u = 0 ; u < VEC_length (tree, clobbered_vuses); u++)
	{
	  t = VEC_index (tree, clobbered_vuses, u);
	  gcc_assert (TREE_CODE (t) != SSA_NAME);
	  var_ann (t)->in_vuse_list = 1;
	  VEC_safe_push (tree, heap, build_vuses, (tree) t);
	}
      for (u = 0; u < VEC_length (tree, clobbered_v_may_defs); u++)
	{
	  t = VEC_index (tree, clobbered_v_may_defs, u);
	  gcc_assert (TREE_CODE (t) != SSA_NAME);
	  var_ann (t)->in_v_may_def_list = 1;
	  VEC_safe_push (tree, heap, build_v_may_defs, (tree) t);
	}
      return;
    }

  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a V_MAY_DEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      unsigned int uid = u;

      if (unmodifiable_var_p (var))
	add_stmt_operand (&var, &empty_ann, opf_none);
      else
	{
	  bool not_read;
	  bool not_written;

	  /* Not read and not written are computed on regular vars, not
	     subvars, so look at the parent var if this is an SFT.  */

	  if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	    uid = DECL_UID (SFT_PARENT_VAR (var));

	  not_read =
	    not_read_b ? bitmap_bit_p (not_read_b, uid) : false;
	  not_written =
	    not_written_b ? bitmap_bit_p (not_written_b, uid) : false;

	  if (not_written)
	    {
	      if (!not_read)
		add_stmt_operand (&var, &empty_ann, opf_none);
	    }
	  else
	    add_stmt_operand (&var, &empty_ann, opf_is_def);
	}
    }

  if ((!not_read_b || bitmap_empty_p (not_read_b))
      && (!not_written_b || bitmap_empty_p (not_written_b)))
    {
      /* Prepare empty cache vectors.  */
      VEC_truncate (tree, clobbered_vuses, 0);
      VEC_truncate (tree, clobbered_v_may_defs, 0);

      /* Now fill the clobbered cache with the values that have been found.  */
      for (u = 0; u < VEC_length (tree, build_vuses); u++)
	VEC_safe_push (tree, heap, clobbered_vuses,
		       VEC_index (tree, build_vuses, u));

      gcc_assert (VEC_length (tree, build_vuses)
		  == VEC_length (tree, clobbered_vuses));

      for (u = 0; u < VEC_length (tree, build_v_may_defs); u++)
	VEC_safe_push (tree, heap, clobbered_v_may_defs,
		       VEC_index (tree, build_v_may_defs, u));

      gcc_assert (VEC_length (tree, build_v_may_defs)
		  == VEC_length (tree, clobbered_v_may_defs));

      ssa_call_clobbered_cache_valid = true;
    }
}
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt)
{
  unsigned u;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_none);
      return;
    }

  /* If the cache is valid, copy the elements into the build vector.  */
  if (ssa_ro_call_cache_valid)
    {
      for (u = 0; u < VEC_length (tree, ro_call_vuses); u++)
	{
	  t = VEC_index (tree, ro_call_vuses, u);
	  gcc_assert (TREE_CODE (t) != SSA_NAME);
	  var_ann (t)->in_vuse_list = 1;
	  VEC_safe_push (tree, heap, build_vuses, (tree) t);
	}
      return;
    }

  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
    }

  /* Prepare an empty cache vector.  */
  VEC_truncate (tree, ro_call_vuses, 0);

  /* Now fill the read-only call cache with the values that have been
     found.  */
  for (u = 0; u < VEC_length (tree, build_vuses); u++)
    VEC_safe_push (tree, heap, ro_call_vuses,
		   VEC_index (tree, build_vuses, u));

  gcc_assert (VEC_length (tree, build_vuses)
	      == VEC_length (tree, ro_call_vuses));

  ssa_ro_call_cache_valid = true;
}
/* Scan the immediate_use list for VAR, making sure it is linked properly.
   Return true if there is a problem.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;
      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
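
/* A sketch of how a checker might drive this routine (a hypothetical
   loop, in the same style as dump_immediate_uses below):

     unsigned x;
     for (x = 1; x < num_ssa_names; x++)
       {
	 tree name = ssa_name (x);
	 if (name && verify_imm_links (stderr, name))
	   internal_error ("broken immediate use list");
       }  */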
/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else
    if (has_single_use (var))
      fprintf (file, " single use.\n");
    else
      fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
      else
	print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }
  fprintf (file, "\n");
}
/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
	continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

#include "gt-tree-ssa-operands.h"