gcc/tree-ssa-operands.c
1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
53 The operand tree is then parsed by the various get_* routines, which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had an SSA_NAME version of a
72 variable, and that same variable occurs in the same operands cache, then
73 the new cache vector will also get the same SSA_NAME.
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'.
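
   As an illustrative sketch only (not part of the mechanism itself), a pass
   that consumes the cache built here typically walks it with the ssa_op_iter
   interface used later in this file, e.g.:

     tree op;
     ssa_op_iter iter;
     FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_USES)
       process_use (op);

   where 'process_use' is a hypothetical callback and SSA_OP_ALL_USES selects
   both real and virtual uses; the other SSA_OP_* classes (SSA_OP_VUSE,
   SSA_OP_VMAYDEF, SSA_OP_VMUSTDEF) can be requested the same way.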
82 /* Flags to describe operand properties in helpers. */
84 /* By default, operands are loaded. */
85 #define opf_none 0
87 /* Operand is the target of an assignment expression or a
88 call-clobbered variable. */
89 #define opf_is_def (1 << 0)
91 /* Operand is the target of an assignment expression. */
92 #define opf_kill_def (1 << 1)
94 /* No virtual operands should be created in the expression. This is used
95 when traversing ADDR_EXPR nodes which have different semantics than
96 other expressions. Inside an ADDR_EXPR node, the only operands that we
97 need to consider are indices into arrays. For instance, &a.b[i] should
98 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
99 VUSE for 'b'. */
100 #define opf_no_vops (1 << 2)
102 /* Operand is a "non-specific" kill for call-clobbers and such. This is used
103 to distinguish "reset the world" events from explicit MODIFY_EXPRs. */
104 #define opf_non_specific (1 << 3)
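
/* Illustrative sketch (mirroring parse_ssa_operands and
   get_indirect_ref_operands below, not additional machinery): these flags
   are combined as a bitmask while walking an expression.  For example, the
   LHS of an assignment is scanned with

     int lhs_flags = opf_is_def;
     if (the whole LHS is overwritten)
       lhs_flags |= opf_kill_def;
     get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);

   while sub-expressions that can never be killing definitions clear the bit
   again with 'flags &= ~opf_kill_def'.  */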
107 /* Array for building all the def operands. */
108 static VEC(tree,heap) *build_defs;
110 /* Array for building all the use operands. */
111 static VEC(tree,heap) *build_uses;
113 /* Array for building all the v_may_def operands. */
114 static VEC(tree,heap) *build_v_may_defs;
116 /* Array for building all the vuse operands. */
117 static VEC(tree,heap) *build_vuses;
119 /* Array for building all the v_must_def operands. */
120 static VEC(tree,heap) *build_v_must_defs;
122 /* True if the operands for call clobbered vars are cached and valid. */
123 bool ssa_call_clobbered_cache_valid;
124 bool ssa_ro_call_cache_valid;
126 /* These arrays are the cached operand vectors for call clobbered calls. */
127 static VEC(tree,heap) *clobbered_v_may_defs;
128 static VEC(tree,heap) *clobbered_vuses;
129 static VEC(tree,heap) *ro_call_vuses;
130 static bool clobbered_aliased_loads;
131 static bool clobbered_aliased_stores;
132 static bool ro_call_aliased_loads;
133 static bool ops_active = false;
135 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
136 static unsigned operand_memory_index;
138 static void get_expr_operands (tree, tree *, int);
139 static void get_asm_expr_operands (tree);
140 static void get_indirect_ref_operands (tree, tree, int);
141 static void get_tmr_operands (tree, tree, int);
142 static void get_call_expr_operands (tree, tree);
143 static inline void append_def (tree *);
144 static inline void append_use (tree *);
145 static void append_v_may_def (tree);
146 static void append_v_must_def (tree);
147 static void add_call_clobber_ops (tree, tree);
148 static void add_call_read_ops (tree);
149 static void add_stmt_operand (tree *, stmt_ann_t, int);
150 static void build_ssa_operands (tree stmt);
152 static def_optype_p free_defs = NULL;
153 static use_optype_p free_uses = NULL;
154 static vuse_optype_p free_vuses = NULL;
155 static maydef_optype_p free_maydefs = NULL;
156 static mustdef_optype_p free_mustdefs = NULL;
159 /* Return the DECL_UID of the base variable of T. */
161 static inline unsigned
162 get_name_decl (tree t)
164 if (TREE_CODE (t) != SSA_NAME)
165 return DECL_UID (t);
166 else
167 return DECL_UID (SSA_NAME_VAR (t));
170 /* Comparison function for qsort used in operand_build_sort_virtual. */
172 static int
173 operand_build_cmp (const void *p, const void *q)
175 tree e1 = *((const tree *)p);
176 tree e2 = *((const tree *)q);
177 unsigned int u1,u2;
179 u1 = get_name_decl (e1);
180 u2 = get_name_decl (e2);
182 /* We want to sort in ascending order. They can never be equal. */
183 #ifdef ENABLE_CHECKING
184 gcc_assert (u1 != u2);
185 #endif
186 return (u1 > u2 ? 1 : -1);
189 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
191 static inline void
192 operand_build_sort_virtual (VEC(tree,heap) *list)
194 int num = VEC_length (tree, list);
195 if (num < 2)
196 return;
197 if (num == 2)
199 if (get_name_decl (VEC_index (tree, list, 0))
200 > get_name_decl (VEC_index (tree, list, 1)))
202 /* Swap elements if in the wrong order. */
203 tree tmp = VEC_index (tree, list, 0);
204 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
205 VEC_replace (tree, list, 1, tmp);
207 return;
209 /* There are 3 or more elements, call qsort. */
210 qsort (VEC_address (tree, list),
211 VEC_length (tree, list),
212 sizeof (tree),
213 operand_build_cmp);
218 /* Return true if the ssa operands cache is active. */
220 bool
221 ssa_operands_active (void)
223 return ops_active;
227 /* Initialize the operand cache routines. */
229 void
230 init_ssa_operands (void)
232 build_defs = VEC_alloc (tree, heap, 5);
233 build_uses = VEC_alloc (tree, heap, 10);
234 build_vuses = VEC_alloc (tree, heap, 25);
235 build_v_may_defs = VEC_alloc (tree, heap, 25);
236 build_v_must_defs = VEC_alloc (tree, heap, 25);
238 gcc_assert (operand_memory == NULL);
239 operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
240 ops_active = true;
244 /* Dispose of anything required by the operand routines. */
246 void
247 fini_ssa_operands (void)
249 struct ssa_operand_memory_d *ptr;
250 VEC_free (tree, heap, build_defs);
251 VEC_free (tree, heap, build_uses);
252 VEC_free (tree, heap, build_v_must_defs);
253 VEC_free (tree, heap, build_v_may_defs);
254 VEC_free (tree, heap, build_vuses);
255 free_defs = NULL;
256 free_uses = NULL;
257 free_vuses = NULL;
258 free_maydefs = NULL;
259 free_mustdefs = NULL;
260 while ((ptr = operand_memory) != NULL)
262 operand_memory = operand_memory->next;
263 ggc_free (ptr);
266 VEC_free (tree, heap, clobbered_v_may_defs);
267 VEC_free (tree, heap, clobbered_vuses);
268 VEC_free (tree, heap, ro_call_vuses);
269 ops_active = false;
273 /* Return a pointer to SIZE bytes of memory for operands. */
275 static inline void *
276 ssa_operand_alloc (unsigned size)
278 char *ptr;
279 if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
281 struct ssa_operand_memory_d *ptr;
282 ptr = ggc_alloc (sizeof (struct ssa_operand_memory_d));
283 ptr->next = operand_memory;
284 operand_memory = ptr;
285 operand_memory_index = 0;
287 ptr = &(operand_memory->mem[operand_memory_index]);
288 operand_memory_index += size;
289 return ptr;
293 /* Make sure PTR is in the correct immediate use list. Since uses are simply
294 pointers into the stmt TREE, there is no way of telling if anyone has
295 changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
296 The contents are different, but the pointer is still the same. This
297 routine will check to make sure PTR is in the correct list, and if it isn't
298 put it in the correct list. We cannot simply check the previous node
299 because all nodes in the same stmt might have been changed. */
301 static inline void
302 correct_use_link (use_operand_p ptr, tree stmt)
304 use_operand_p prev;
305 tree root;
307 /* Fold_stmt () may have changed the stmt pointers. */
308 if (ptr->stmt != stmt)
309 ptr->stmt = stmt;
311 prev = ptr->prev;
312 if (prev)
314 /* Find the root element, making sure we skip any safe iterators. */
315 while (prev->use != NULL || prev->stmt == NULL)
316 prev = prev->prev;
318 /* Get the ssa_name of the list the node is in. */
319 root = prev->stmt;
320 /* If it's the right list, simply return. */
321 if (root == *(ptr->use))
322 return;
324 /* It's in the wrong list if we reach here. */
325 delink_imm_use (ptr);
326 link_imm_use (ptr, *(ptr->use));
330 /* This routine makes sure that PTR is in an immediate use list, and makes
331 sure the stmt pointer is set to the current stmt. Virtual uses do not need
332 the overhead of correct_use_link since they cannot be directly manipulated
333 like a real use can be. (They don't exist in the TREE_OPERAND nodes.) */
334 static inline void
335 set_virtual_use_link (use_operand_p ptr, tree stmt)
337 /* Fold_stmt () may have changed the stmt pointers. */
338 if (ptr->stmt != stmt)
339 ptr->stmt = stmt;
341 /* If this use isn't in a list, add it to the correct list. */
342 if (!ptr->prev)
343 link_imm_use (ptr, *(ptr->use));
348 #define FINALIZE_OPBUILD build_defs
349 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
350 build_defs, (I))
351 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
352 build_defs, (I))
353 #define FINALIZE_FUNC finalize_ssa_def_ops
354 #define FINALIZE_ALLOC alloc_def
355 #define FINALIZE_FREE free_defs
356 #define FINALIZE_TYPE struct def_optype_d
357 #define FINALIZE_ELEM(PTR) ((PTR)->def_ptr)
358 #define FINALIZE_OPS DEF_OPS
359 #define FINALIZE_BASE(VAR) VAR
360 #define FINALIZE_BASE_TYPE tree *
361 #define FINALIZE_BASE_ZERO NULL
362 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL)
363 #include "tree-ssa-opfinalize.h"
366 /* This routine will create stmt operands for STMT from the def build list. */
368 static void
369 finalize_ssa_defs (tree stmt)
371 unsigned int num = VEC_length (tree, build_defs);
372 /* There should only be a single real definition per assignment. */
373 gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
375 /* If there is an old list, often the new list is identical, or close, so
376 find the elements at the beginning that are the same as the vector. */
378 finalize_ssa_def_ops (stmt);
379 VEC_truncate (tree, build_defs, 0);
382 #define FINALIZE_OPBUILD build_uses
383 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
384 build_uses, (I))
385 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
386 build_uses, (I))
387 #define FINALIZE_FUNC finalize_ssa_use_ops
388 #define FINALIZE_ALLOC alloc_use
389 #define FINALIZE_FREE free_uses
390 #define FINALIZE_TYPE struct use_optype_d
391 #define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use)
392 #define FINALIZE_OPS USE_OPS
393 #define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR)
394 #define FINALIZE_CORRECT_USE correct_use_link
395 #define FINALIZE_BASE(VAR) VAR
396 #define FINALIZE_BASE_TYPE tree *
397 #define FINALIZE_BASE_ZERO NULL
398 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
399 (PTR)->use_ptr.use = (VAL); \
400 link_imm_use_stmt (&((PTR)->use_ptr), \
401 *(VAL), (STMT))
402 #include "tree-ssa-opfinalize.h"
404 /* Create the use operand vector for STMT. */
406 static void
407 finalize_ssa_uses (tree stmt)
409 #ifdef ENABLE_CHECKING
411 unsigned x;
412 unsigned num = VEC_length (tree, build_uses);
414 /* If the pointer to the operand is the statement itself, something is
415 wrong. It means that we are pointing to a local variable (the
416 initial call to get_stmt_operands does not pass a pointer to a
417 statement). */
418 for (x = 0; x < num; x++)
419 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
421 #endif
422 finalize_ssa_use_ops (stmt);
423 VEC_truncate (tree, build_uses, 0);
427 /* Create the v_may_def operand vector for STMT. */
428 #define FINALIZE_OPBUILD build_v_may_defs
429 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_may_defs, (I))
430 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
431 build_v_may_defs, (I)))
432 #define FINALIZE_FUNC finalize_ssa_v_may_def_ops
433 #define FINALIZE_ALLOC alloc_maydef
434 #define FINALIZE_FREE free_maydefs
435 #define FINALIZE_TYPE struct maydef_optype_d
436 #define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR)
437 #define FINALIZE_OPS MAYDEF_OPS
438 #define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR)
439 #define FINALIZE_CORRECT_USE set_virtual_use_link
440 #define FINALIZE_BASE_ZERO 0
441 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
442 #define FINALIZE_BASE_TYPE unsigned
443 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
444 (PTR)->def_var = (VAL); \
445 (PTR)->use_var = (VAL); \
446 (PTR)->use_ptr.use = &((PTR)->use_var); \
447 link_imm_use_stmt (&((PTR)->use_ptr), \
448 (VAL), (STMT))
449 #include "tree-ssa-opfinalize.h"
452 static void
453 finalize_ssa_v_may_defs (tree stmt)
455 finalize_ssa_v_may_def_ops (stmt);
459 /* Clear the in_list bits and empty the build array for v_may_defs. */
461 static inline void
462 cleanup_v_may_defs (void)
464 unsigned x, num;
465 num = VEC_length (tree, build_v_may_defs);
467 for (x = 0; x < num; x++)
469 tree t = VEC_index (tree, build_v_may_defs, x);
470 if (TREE_CODE (t) != SSA_NAME)
472 var_ann_t ann = var_ann (t);
473 ann->in_v_may_def_list = 0;
476 VEC_truncate (tree, build_v_may_defs, 0);
480 #define FINALIZE_OPBUILD build_vuses
481 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_vuses, (I))
482 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
483 build_vuses, (I)))
484 #define FINALIZE_FUNC finalize_ssa_vuse_ops
485 #define FINALIZE_ALLOC alloc_vuse
486 #define FINALIZE_FREE free_vuses
487 #define FINALIZE_TYPE struct vuse_optype_d
488 #define FINALIZE_ELEM(PTR) VUSE_OP (PTR)
489 #define FINALIZE_OPS VUSE_OPS
490 #define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR)
491 #define FINALIZE_CORRECT_USE set_virtual_use_link
492 #define FINALIZE_BASE_ZERO 0
493 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
494 #define FINALIZE_BASE_TYPE unsigned
495 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
496 (PTR)->use_var = (VAL); \
497 (PTR)->use_ptr.use = &((PTR)->use_var); \
498 link_imm_use_stmt (&((PTR)->use_ptr), \
499 (VAL), (STMT))
500 #include "tree-ssa-opfinalize.h"
503 /* Create the vuse operand vector for STMT. */
505 static void
506 finalize_ssa_vuses (tree stmt)
508 unsigned num, num_v_may_defs;
509 unsigned vuse_index;
511 /* Remove superfluous VUSE operands. If the statement already has a
512 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
513 needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
514 suppose that variable 'a' is aliased:
516 # VUSE <a_2>
517 # a_3 = V_MAY_DEF <a_2>
518 a = a + 1;
520 The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
521 operation. */
523 num = VEC_length (tree, build_vuses);
524 num_v_may_defs = VEC_length (tree, build_v_may_defs);
526 if (num > 0 && num_v_may_defs > 0)
528 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
530 tree vuse;
531 vuse = VEC_index (tree, build_vuses, vuse_index);
532 if (TREE_CODE (vuse) != SSA_NAME)
534 var_ann_t ann = var_ann (vuse);
535 ann->in_vuse_list = 0;
536 if (ann->in_v_may_def_list)
538 VEC_ordered_remove (tree, build_vuses, vuse_index);
539 continue;
542 vuse_index++;
545 else
546 /* Clear out the in_list bits. */
547 for (vuse_index = 0;
548 vuse_index < VEC_length (tree, build_vuses);
549 vuse_index++)
551 tree t = VEC_index (tree, build_vuses, vuse_index);
552 if (TREE_CODE (t) != SSA_NAME)
554 var_ann_t ann = var_ann (t);
555 ann->in_vuse_list = 0;
559 finalize_ssa_vuse_ops (stmt);
560 /* The v_may_def build vector wasn't cleaned up because we needed it. */
561 cleanup_v_may_defs ();
563 /* Free the vuses build vector. */
564 VEC_truncate (tree, build_vuses, 0);
568 /* Create the v_must_def operand vector for STMT. */
570 #define FINALIZE_OPBUILD build_v_must_defs
571 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_must_defs, (I))
572 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
573 build_v_must_defs, (I)))
574 #define FINALIZE_FUNC finalize_ssa_v_must_def_ops
575 #define FINALIZE_ALLOC alloc_mustdef
576 #define FINALIZE_FREE free_mustdefs
577 #define FINALIZE_TYPE struct mustdef_optype_d
578 #define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR)
579 #define FINALIZE_OPS MUSTDEF_OPS
580 #define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR)
581 #define FINALIZE_CORRECT_USE set_virtual_use_link
582 #define FINALIZE_BASE_ZERO 0
583 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
584 #define FINALIZE_BASE_TYPE unsigned
585 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
586 (PTR)->def_var = (VAL); \
587 (PTR)->kill_var = (VAL); \
588 (PTR)->use_ptr.use = &((PTR)->kill_var);\
589 link_imm_use_stmt (&((PTR)->use_ptr), \
590 (VAL), (STMT))
591 #include "tree-ssa-opfinalize.h"
594 static void
595 finalize_ssa_v_must_defs (tree stmt)
597 /* In the presence of subvars, there may be more than one V_MUST_DEF per
598 statement (one for each subvar). It is a bit expensive to verify that
599 all must-defs in a statement belong to subvars if there is more than one
600 MUST-def, so we don't do it. Suffice to say, if you reach here without
601 having subvars, and have num >1, you have hit a bug. */
603 finalize_ssa_v_must_def_ops (stmt);
604 VEC_truncate (tree, build_v_must_defs, 0);
608 /* Finalize all the build vectors, filling the new operands into STMT's operand cache. */
610 static inline void
611 finalize_ssa_stmt_operands (tree stmt)
613 finalize_ssa_defs (stmt);
614 finalize_ssa_uses (stmt);
615 finalize_ssa_v_must_defs (stmt);
616 finalize_ssa_v_may_defs (stmt);
617 finalize_ssa_vuses (stmt);
621 /* Start the process of building up the operand vectors. */
623 static inline void
624 start_ssa_stmt_operands (void)
626 gcc_assert (VEC_length (tree, build_defs) == 0);
627 gcc_assert (VEC_length (tree, build_uses) == 0);
628 gcc_assert (VEC_length (tree, build_vuses) == 0);
629 gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
630 gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
634 /* Add DEF_P to the list of pointers to operands. */
636 static inline void
637 append_def (tree *def_p)
639 VEC_safe_push (tree, heap, build_defs, (tree)def_p);
643 /* Add USE_P to the list of pointers to operands. */
645 static inline void
646 append_use (tree *use_p)
648 VEC_safe_push (tree, heap, build_uses, (tree)use_p);
652 /* Add a new virtual may def for variable VAR to the build array. */
654 static inline void
655 append_v_may_def (tree var)
657 if (TREE_CODE (var) != SSA_NAME)
659 var_ann_t ann = get_var_ann (var);
661 /* Don't allow duplicate entries. */
662 if (ann->in_v_may_def_list)
663 return;
664 ann->in_v_may_def_list = 1;
667 VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
671 /* Add VAR to the list of virtual uses. */
673 static inline void
674 append_vuse (tree var)
677 /* Don't allow duplicate entries. */
678 if (TREE_CODE (var) != SSA_NAME)
680 var_ann_t ann = get_var_ann (var);
682 if (ann->in_vuse_list || ann->in_v_may_def_list)
683 return;
684 ann->in_vuse_list = 1;
687 VEC_safe_push (tree, heap, build_vuses, (tree)var);
693 /* Add VAR to the list of virtual must definitions. */
693 static inline void
694 append_v_must_def (tree var)
696 unsigned i;
698 /* Don't allow duplicate entries. */
699 for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
700 if (var == VEC_index (tree, build_v_must_defs, i))
701 return;
703 VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
707 /* Parse STMT looking for operands. When finished, the various build_*
708 operand vectors will have potential operands in them. */
711 static void
712 parse_ssa_operands (tree stmt)
714 enum tree_code code;
716 code = TREE_CODE (stmt);
717 switch (code)
719 case MODIFY_EXPR:
720 /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
721 either only part of LHS is modified or if the RHS might throw,
722 otherwise, use V_MUST_DEF.
724 ??? If it might throw, we should represent somehow that it is killed
725 on the fallthrough path. */
727 tree lhs = TREE_OPERAND (stmt, 0);
728 int lhs_flags = opf_is_def;
730 get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
732 /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
733 or not the entire LHS is modified; that depends on what's
734 inside the VIEW_CONVERT_EXPR. */
735 if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
736 lhs = TREE_OPERAND (lhs, 0);
738 if (TREE_CODE (lhs) != ARRAY_REF
739 && TREE_CODE (lhs) != ARRAY_RANGE_REF
740 && TREE_CODE (lhs) != BIT_FIELD_REF
741 && TREE_CODE (lhs) != REALPART_EXPR
742 && TREE_CODE (lhs) != IMAGPART_EXPR)
743 lhs_flags |= opf_kill_def;
745 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
747 break;
749 case COND_EXPR:
750 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
751 break;
753 case SWITCH_EXPR:
754 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
755 break;
757 case ASM_EXPR:
758 get_asm_expr_operands (stmt);
759 break;
761 case RETURN_EXPR:
762 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
763 break;
765 case GOTO_EXPR:
766 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
767 break;
769 case LABEL_EXPR:
770 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
771 break;
773 /* These nodes contain no variable references. */
774 case BIND_EXPR:
775 case CASE_LABEL_EXPR:
776 case TRY_CATCH_EXPR:
777 case TRY_FINALLY_EXPR:
778 case EH_FILTER_EXPR:
779 case CATCH_EXPR:
780 case RESX_EXPR:
781 break;
783 default:
784 /* Notice that if get_expr_operands tries to use &STMT as the operand
785 pointer (which may only happen for USE operands), we will fail in
786 append_use. This default will handle statements like empty
787 statements, or CALL_EXPRs that may appear on the RHS of a statement
788 or as statements themselves. */
789 get_expr_operands (stmt, &stmt, opf_none);
790 break;
794 /* Create an operands cache for STMT. */
796 static void
797 build_ssa_operands (tree stmt)
799 stmt_ann_t ann = get_stmt_ann (stmt);
801 /* Initially assume that the statement has no volatile operands, nor
802 makes aliased loads or stores. */
803 if (ann)
805 ann->has_volatile_ops = false;
806 ann->makes_aliased_stores = false;
807 ann->makes_aliased_loads = false;
810 start_ssa_stmt_operands ();
812 parse_ssa_operands (stmt);
813 operand_build_sort_virtual (build_vuses);
814 operand_build_sort_virtual (build_v_may_defs);
815 operand_build_sort_virtual (build_v_must_defs);
817 finalize_ssa_stmt_operands (stmt);
821 /* Free any operands vectors in OPS. */
822 void
823 free_ssa_operands (stmt_operands_p ops)
825 ops->def_ops = NULL;
826 ops->use_ops = NULL;
827 ops->maydef_ops = NULL;
828 ops->mustdef_ops = NULL;
829 ops->vuse_ops = NULL;
833 /* Get the operands of statement STMT. Note that repeated calls to
834 get_stmt_operands for the same statement will do nothing until the
835 statement is marked modified by a call to mark_stmt_modified(). */
837 void
838 update_stmt_operands (tree stmt)
840 stmt_ann_t ann = get_stmt_ann (stmt);
841 /* If get_stmt_operands is called before SSA is initialized, don't
842 do anything. */
843 if (!ssa_operands_active ())
844 return;
845 /* The optimizers cannot handle statements that are nothing but a
846 _DECL. This indicates a bug in the gimplifier. */
847 gcc_assert (!SSA_VAR_P (stmt));
849 gcc_assert (ann->modified);
851 timevar_push (TV_TREE_OPS);
853 build_ssa_operands (stmt);
855 /* Clear the modified bit for STMT. Subsequent calls to
856 get_stmt_operands for this statement will do nothing until the
857 statement is marked modified by a call to mark_stmt_modified(). */
858 ann->modified = 0;
860 timevar_pop (TV_TREE_OPS);
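
/* Illustrative usage sketch (not part of this file's machinery): after a
   pass rewrites part of a statement's tree, it marks the statement and then
   asks for the operand cache to be rebuilt:

     mark_stmt_modified (stmt);
     update_stmt_operands (stmt);

   Calling update_stmt_operands on a statement that has not been marked
   modified trips the gcc_assert (ann->modified) above.  */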
864 /* Copy virtual operands from SRC to DEST. */
866 void
867 copy_virtual_operands (tree dest, tree src)
869 tree t;
870 ssa_op_iter iter, old_iter;
871 use_operand_p use_p, u2;
872 def_operand_p def_p, d2;
874 build_ssa_operands (dest);
876 /* Copy all the virtual fields. */
877 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
878 append_vuse (t);
879 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
880 append_v_may_def (t);
881 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
882 append_v_must_def (t);
884 if (VEC_length (tree, build_vuses) == 0
885 && VEC_length (tree, build_v_may_defs) == 0
886 && VEC_length (tree, build_v_must_defs) == 0)
887 return;
889 /* Now commit the virtual operands to this stmt. */
890 finalize_ssa_v_must_defs (dest);
891 finalize_ssa_v_may_defs (dest);
892 finalize_ssa_vuses (dest);
894 /* Finally, set the fields to the same values as the originals. */
897 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
898 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
900 gcc_assert (!op_iter_done (&old_iter));
901 SET_USE (use_p, t);
902 t = op_iter_next_tree (&old_iter);
904 gcc_assert (op_iter_done (&old_iter));
906 op_iter_init_maydef (&old_iter, src, &u2, &d2);
907 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
909 gcc_assert (!op_iter_done (&old_iter));
910 SET_USE (use_p, USE_FROM_PTR (u2));
911 SET_DEF (def_p, DEF_FROM_PTR (d2));
912 op_iter_next_maymustdef (&u2, &d2, &old_iter);
914 gcc_assert (op_iter_done (&old_iter));
916 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
917 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
919 gcc_assert (!op_iter_done (&old_iter));
920 SET_USE (use_p, USE_FROM_PTR (u2));
921 SET_DEF (def_p, DEF_FROM_PTR (d2));
922 op_iter_next_maymustdef (&u2, &d2, &old_iter);
924 gcc_assert (op_iter_done (&old_iter));
929 /* Specifically for use in DOM's expression analysis. Given a store, we
930 create an artificial stmt which looks like a load from the store; this can
931 be used to eliminate redundant loads. OLD_STMT is the store stmt, and
932 NEW_STMT is the new load which represents a load of the
933 values stored. */
935 void
936 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
938 stmt_ann_t ann;
939 tree op;
940 ssa_op_iter iter;
941 use_operand_p use_p;
942 unsigned x;
944 ann = get_stmt_ann (new_stmt);
946 /* Process the stmt looking for operands. */
947 start_ssa_stmt_operands ();
948 parse_ssa_operands (new_stmt);
950 for (x = 0; x < VEC_length (tree, build_vuses); x++)
952 tree t = VEC_index (tree, build_vuses, x);
953 if (TREE_CODE (t) != SSA_NAME)
955 var_ann_t ann = var_ann (t);
956 ann->in_vuse_list = 0;
960 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
962 tree t = VEC_index (tree, build_v_may_defs, x);
963 if (TREE_CODE (t) != SSA_NAME)
965 var_ann_t ann = var_ann (t);
966 ann->in_v_may_def_list = 0;
969 /* Remove any virtual operands that were found. */
970 VEC_truncate (tree, build_v_may_defs, 0);
971 VEC_truncate (tree, build_v_must_defs, 0);
972 VEC_truncate (tree, build_vuses, 0);
974 /* For each VDEF on the original statement, we want to create a
975 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
976 statement. */
977 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
978 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
979 append_vuse (op);
981 /* Now build the operands for this new stmt. */
982 finalize_ssa_stmt_operands (new_stmt);
984 /* All uses in this fake stmt must not be in the immediate use lists. */
985 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
986 delink_imm_use (use_p);
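
/* Illustrative sketch of the intended use described above (assuming a
   caller such as DOM's expression analysis): for a store like 'a = x_3',
   the caller builds an artificial statement that reads the stored value,
   e.g. 'tmp = a', and then calls

     create_ssa_artficial_load_stmt (new_stmt, store_stmt);

   so that NEW_STMT carries VUSEs corresponding to the store's V_MAY_DEF or
   V_MUST_DEF results, allowing later loads of 'a' to be recognized as
   redundant.  */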
989 void
990 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
992 tree op0, op1;
993 op0 = *exp0;
994 op1 = *exp1;
996 /* If the operand cache is active, attempt to preserve the relative positions
997 of these two operands in their respective immediate use lists. */
998 if (ssa_operands_active () && op0 != op1)
1000 use_optype_p use0, use1, ptr;
1001 use0 = use1 = NULL;
1002 /* Find the 2 operands in the cache, if they are there. */
1003 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1004 if (USE_OP_PTR (ptr)->use == exp0)
1006 use0 = ptr;
1007 break;
1009 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1010 if (USE_OP_PTR (ptr)->use == exp1)
1012 use1 = ptr;
1013 break;
1015 /* If we didn't find operand entries for both uses, there isn't much we can do
1016 at this point. Presumably we don't need to worry about it. */
1017 if (use0 && use1)
1019 tree *tmp = USE_OP_PTR (use1)->use;
1020 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
1021 USE_OP_PTR (use0)->use = tmp;
1025 /* Now swap the data. */
1026 *exp0 = op1;
1027 *exp1 = op0;
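
/* Illustrative sketch (hypothetical caller): a pass canonicalizing a
   commutative right-hand side could swap the two operand slots while
   keeping the operand cache and immediate-use lists consistent with

     swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0), &TREE_OPERAND (rhs, 1));

   where 'rhs' is assumed to be the binary expression on the statement's
   right-hand side.  */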
1030 /* Recursively scan the expression pointed to by EXPR_P in statement STMT.
1031 FLAGS is a bitmask of OPF_* constants modifying how to interpret
1032 the operands found. */
1034 static void
1035 get_expr_operands (tree stmt, tree *expr_p, int flags)
1037 enum tree_code code;
1038 enum tree_code_class class;
1039 tree expr = *expr_p;
1040 stmt_ann_t s_ann = stmt_ann (stmt);
1042 if (expr == NULL)
1043 return;
1045 code = TREE_CODE (expr);
1046 class = TREE_CODE_CLASS (code);
1048 switch (code)
1050 case ADDR_EXPR:
1051 /* We could have the address of a component, array member,
1052 etc which has interesting variable references. */
1053 /* Taking the address of a variable does not represent a
1054 reference to it, but the fact that the stmt takes its address will be
1055 of interest to some passes (e.g. alias resolution). */
1056 add_stmt_operand (expr_p, s_ann, 0);
1058 /* If the address is invariant, there may be no interesting variable
1059 references inside. */
1060 if (is_gimple_min_invariant (expr))
1061 return;
1063 /* There should be no VUSEs created, since the referenced objects are
1064 not really accessed. The only operands that we should find here
1065 are ARRAY_REF indices which will always be real operands (GIMPLE
1066 does not allow non-registers as array indices). */
1067 flags |= opf_no_vops;
1069 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1070 return;
1072 case SSA_NAME:
1073 case STRUCT_FIELD_TAG:
1074 case TYPE_MEMORY_TAG:
1075 case NAME_MEMORY_TAG:
1076 case VAR_DECL:
1077 case PARM_DECL:
1078 case RESULT_DECL:
1079 case CONST_DECL:
1081 subvar_t svars;
1083 /* Add the subvars for a variable if it has subvars, to DEFS or USES.
1084 Otherwise, add the variable itself.
1085 Whether it goes to USES or DEFS depends on the operand flags. */
1086 if (var_can_have_subvars (expr)
1087 && (svars = get_subvars_for_var (expr)))
1089 subvar_t sv;
1090 for (sv = svars; sv; sv = sv->next)
1091 add_stmt_operand (&sv->var, s_ann, flags);
1093 else
1095 add_stmt_operand (expr_p, s_ann, flags);
1097 return;
1099 case MISALIGNED_INDIRECT_REF:
1100 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1101 /* fall through */
1103 case ALIGN_INDIRECT_REF:
1104 case INDIRECT_REF:
1105 get_indirect_ref_operands (stmt, expr, flags);
1106 return;
1108 case TARGET_MEM_REF:
1109 get_tmr_operands (stmt, expr, flags);
1110 return;
1112 case ARRAY_REF:
1113 case ARRAY_RANGE_REF:
1114 /* Treat array references as references to the virtual variable
1115 representing the array. The virtual variable for an ARRAY_REF
1116 is the VAR_DECL for the array. */
1118 /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
1119 according to the value of IS_DEF. Recurse if the LHS of the
1120 ARRAY_REF node is not a regular variable. */
1121 if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
1122 add_stmt_operand (expr_p, s_ann, flags);
1123 else
1124 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1126 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1127 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1128 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1129 return;
1131 case COMPONENT_REF:
1132 case REALPART_EXPR:
1133 case IMAGPART_EXPR:
1135 tree ref;
1136 HOST_WIDE_INT offset, size, maxsize;
1137 /* This component ref becomes an access to all of the subvariables
1138 it can touch, if we can determine that, but *NOT* the real one.
1139 If we can't determine which fields we could touch, the recursion
1140 will eventually get to a variable and add *all* of its subvars, or
1141 whatever is the minimum correct subset. */
1143 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
1144 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
1146 subvar_t svars = get_subvars_for_var (ref);
1147 subvar_t sv;
1148 for (sv = svars; sv; sv = sv->next)
1150 bool exact;
1151 if (overlap_subvar (offset, maxsize, sv, &exact))
1153 int subvar_flags = flags;
1154 if (!exact
1155 || size != maxsize)
1156 subvar_flags &= ~opf_kill_def;
1157 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1161 else
1162 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1163 flags & ~opf_kill_def);
1165 if (code == COMPONENT_REF)
1167 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1168 s_ann->has_volatile_ops = true;
1169 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1171 return;
1173 case WITH_SIZE_EXPR:
1174 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1175 and an rvalue reference to its second argument. */
1176 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1177 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1178 return;
1180 case CALL_EXPR:
1181 get_call_expr_operands (stmt, expr);
1182 return;
1184 case COND_EXPR:
1185 case VEC_COND_EXPR:
1186 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1187 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1188 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1189 return;
1191 case MODIFY_EXPR:
1193 int subflags;
1194 tree op;
1196 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1198 op = TREE_OPERAND (expr, 0);
1199 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1200 op = TREE_OPERAND (op, 0);
1201 if (TREE_CODE (op) == ARRAY_REF
1202 || TREE_CODE (op) == ARRAY_RANGE_REF
1203 || TREE_CODE (op) == REALPART_EXPR
1204 || TREE_CODE (op) == IMAGPART_EXPR)
1205 subflags = opf_is_def;
1206 else
1207 subflags = opf_is_def | opf_kill_def;
1209 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
1210 return;
1213 case CONSTRUCTOR:
1215 /* General aggregate CONSTRUCTORs have been decomposed, but they
1216 are still in use as the COMPLEX_EXPR equivalent for vectors. */
1217 constructor_elt *ce;
1218 unsigned HOST_WIDE_INT idx;
1220 for (idx = 0;
1221 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
1222 idx++)
1223 get_expr_operands (stmt, &ce->value, opf_none);
1225 return;
1228 case TRUTH_NOT_EXPR:
1229 case BIT_FIELD_REF:
1230 case VIEW_CONVERT_EXPR:
1231 do_unary:
1232 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1233 return;
1235 case TRUTH_AND_EXPR:
1236 case TRUTH_OR_EXPR:
1237 case TRUTH_XOR_EXPR:
1238 case COMPOUND_EXPR:
1239 case OBJ_TYPE_REF:
1240 case ASSERT_EXPR:
1241 do_binary:
1243 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1244 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1245 return;
1248 case REALIGN_LOAD_EXPR:
1250 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1251 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1252 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
1253 return;
1256 case BLOCK:
1257 case FUNCTION_DECL:
1258 case EXC_PTR_EXPR:
1259 case FILTER_EXPR:
1260 case LABEL_DECL:
1261 /* Expressions that make no memory references. */
1262 return;
1264 default:
1265 if (class == tcc_unary)
1266 goto do_unary;
1267 if (class == tcc_binary || class == tcc_comparison)
1268 goto do_binary;
1269 if (class == tcc_constant || class == tcc_type)
1270 return;
1273 /* If we get here, something has gone wrong. */
1274 #ifdef ENABLE_CHECKING
1275 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
1276 debug_tree (expr);
1277 fputs ("\n", stderr);
1278 internal_error ("internal error");
1279 #endif
1280 gcc_unreachable ();
1284 /* Scan operands in the ASM_EXPR statement STMT. */
1286 static void
1287 get_asm_expr_operands (tree stmt)
1289 stmt_ann_t s_ann = stmt_ann (stmt);
1290 int noutputs = list_length (ASM_OUTPUTS (stmt));
1291 const char **oconstraints
1292 = (const char **) alloca ((noutputs) * sizeof (const char *));
1293 int i;
1294 tree link;
1295 const char *constraint;
1296 bool allows_mem, allows_reg, is_inout;
1298 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1300 oconstraints[i] = constraint
1301 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1302 parse_output_constraint (&constraint, i, 0, 0,
1303 &allows_mem, &allows_reg, &is_inout);
1305 /* This should have been split in gimplify_asm_expr. */
1306 gcc_assert (!allows_reg || !is_inout);
1308 /* Memory operands are addressable. Note that STMT needs the
1309 address of this operand. */
1310 if (!allows_reg && allows_mem)
1312 tree t = get_base_address (TREE_VALUE (link));
1313 if (t && DECL_P (t) && s_ann)
1314 add_to_addressable_set (t, &s_ann->addresses_taken);
1317 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1320 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1322 constraint
1323 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1324 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1325 oconstraints, &allows_mem, &allows_reg);
1327 /* Memory operands are addressable. Note that STMT needs the
1328 address of this operand. */
1329 if (!allows_reg && allows_mem)
1331 tree t = get_base_address (TREE_VALUE (link));
1332 if (t && DECL_P (t) && s_ann)
1333 add_to_addressable_set (t, &s_ann->addresses_taken);
1336 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1340 /* Clobber memory for asm ("" : : : "memory"); */
1341 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1342 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1344 unsigned i;
1345 bitmap_iterator bi;
1347 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1348 decided to group them). */
1349 if (global_var)
1350 add_stmt_operand (&global_var, s_ann, opf_is_def);
1351 else
1352 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1354 tree var = referenced_var (i);
1355 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1358 /* Now clobber all addressables. */
1359 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
1361 tree var = referenced_var (i);
1363 /* Subvars are explicitly represented in this list, so
1364 we don't need the original to be added to the clobber
1365 ops, but the original *will* be in this list because
1366 we keep the addressability of the original
1367 variable up-to-date so we don't screw up the rest of
1368 the backend. */
1369 if (var_can_have_subvars (var)
1370 && get_subvars_for_var (var) != NULL)
1371 continue;
1373 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1376 break;
1380 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1381 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. */
1383 static void
1384 get_indirect_ref_operands (tree stmt, tree expr, int flags)
1386 tree *pptr = &TREE_OPERAND (expr, 0);
1387 tree ptr = *pptr;
1388 stmt_ann_t s_ann = stmt_ann (stmt);
1390 /* Stores into INDIRECT_REF operands are never killing definitions. */
1391 flags &= ~opf_kill_def;
1393 if (SSA_VAR_P (ptr))
1395 struct ptr_info_def *pi = NULL;
1397 /* If PTR has flow-sensitive points-to information, use it. */
1398 if (TREE_CODE (ptr) == SSA_NAME
1399 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1400 && pi->name_mem_tag)
1402 /* PTR has its own memory tag. Use it. */
1403 add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
1405 else
1407 /* If PTR is not an SSA_NAME or it doesn't have a name
1408 tag, use its type memory tag. */
1409 var_ann_t v_ann;
1411 /* If we are emitting debugging dumps, display a warning if
1412 PTR is an SSA_NAME with no flow-sensitive alias
1413 information. That means that we may need to compute
1414 aliasing again. */
1415 if (dump_file
1416 && TREE_CODE (ptr) == SSA_NAME
1417 && pi == NULL)
1419 fprintf (dump_file,
1420 "NOTE: no flow-sensitive alias info for ");
1421 print_generic_expr (dump_file, ptr, dump_flags);
1422 fprintf (dump_file, " in ");
1423 print_generic_stmt (dump_file, stmt, dump_flags);
1426 if (TREE_CODE (ptr) == SSA_NAME)
1427 ptr = SSA_NAME_VAR (ptr);
1428 v_ann = var_ann (ptr);
1429 if (v_ann->type_mem_tag)
1430 add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
1434 /* If a constant is used as a pointer, we can't generate a real
1435 operand for it but we mark the statement volatile to prevent
1436 optimizations from messing things up. */
1437 else if (TREE_CODE (ptr) == INTEGER_CST)
1439 if (s_ann)
1440 s_ann->has_volatile_ops = true;
1441 return;
1444 /* Everything else *should* have been folded elsewhere, but users
1445 are smarter than we are in finding ways to write invalid code. We
1446 cannot just assert here. If we were absolutely certain that we
1447 do handle all valid cases, then we could just do nothing here.
1448 That seems optimistic, so attempt to do something logical... */
1449 else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
1450 && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
1451 && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
1453 /* Make sure we know the object is addressable. */
1454 pptr = &TREE_OPERAND (ptr, 0);
1455 add_stmt_operand (pptr, s_ann, 0);
1457 /* Mark the object itself with a VUSE. */
1458 pptr = &TREE_OPERAND (*pptr, 0);
1459 get_expr_operands (stmt, pptr, flags);
1460 return;
1463 /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
1464 else
1465 gcc_unreachable ();
1467 /* Add a USE operand for the base pointer. */
1468 get_expr_operands (stmt, pptr, opf_none);
1471 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1473 static void
1474 get_tmr_operands (tree stmt, tree expr, int flags)
1476 tree tag = TMR_TAG (expr);
1478 /* First record the real operands. */
1479 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1480 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1482 /* MEM_REFs should never be killing. */
1483 flags &= ~opf_kill_def;
1485 if (TMR_SYMBOL (expr))
1487 stmt_ann_t ann = stmt_ann (stmt);
1488 add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1491 if (tag)
1492 get_expr_operands (stmt, &tag, flags);
1493 else
1494 /* Something weird, so ensure that we will be careful. */
1495 stmt_ann (stmt)->has_volatile_ops = true;
1498 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1500 static void
1501 get_call_expr_operands (tree stmt, tree expr)
1503 tree op;
1504 int call_flags = call_expr_flags (expr);
1506 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1507 operands for all the symbols that have been found to be
1508 call-clobbered.
1510 Note that if aliases have not been computed, the global effects
1511 of calls will not be included in the SSA web. This is fine
1512 because no optimizer should run before aliases have been
1513 computed. By not bothering with virtual operands for CALL_EXPRs
1514 we avoid adding superfluous virtual operands, which can be a
1515 significant compile time sink (See PR 15855). */
1516 if (aliases_computed_p
1517 && !bitmap_empty_p (call_clobbered_vars)
1518 && !(call_flags & ECF_NOVOPS))
1520 /* A 'pure' or a 'const' function never call-clobbers anything.
1521 A 'noreturn' function might, but since we don't return anyway
1522 there is no point in recording that. */
1523 if (TREE_SIDE_EFFECTS (expr)
1524 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1525 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1526 else if (!(call_flags & ECF_CONST))
1527 add_call_read_ops (stmt);
1530 /* Find uses in the called function. */
1531 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1533 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1534 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1536 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1541 /* Add *VAR_P to the appropriate operand array of S_ANN's statement. FLAGS is as in
1542 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1543 the statement's real operands, otherwise it is added to virtual
1544 operands. */
1546 static void
1547 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1549 bool is_real_op;
1550 tree var, sym;
1551 var_ann_t v_ann;
1553 var = *var_p;
1554 STRIP_NOPS (var);
1556 /* If the operand is an ADDR_EXPR, add its operand to the list of
1557 variables that have had their address taken in this statement. */
1558 if (TREE_CODE (var) == ADDR_EXPR && s_ann)
1560 add_to_addressable_set (TREE_OPERAND (var, 0), &s_ann->addresses_taken);
1561 return;
1564 /* If the original variable is not a scalar, it will be added to the list
1565 of virtual operands. In that case, use its base symbol as the virtual
1566 variable representing it. */
1567 is_real_op = is_gimple_reg (var);
1568 if (!is_real_op && !DECL_P (var))
1569 var = get_virtual_var (var);
1571 /* If VAR is not a variable that we care to optimize, do nothing. */
1572 if (var == NULL_TREE || !SSA_VAR_P (var))
1573 return;
1575 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1576 v_ann = var_ann (sym);
1578 /* Mark statements with volatile operands. Optimizers should back
1579 off from statements having volatile operands. */
1580 if (TREE_THIS_VOLATILE (sym) && s_ann)
1581 s_ann->has_volatile_ops = true;
1583 /* If the variable cannot be modified and this is a V_MAY_DEF change
1584 it into a VUSE. This happens when read-only variables are marked
1585 call-clobbered and/or aliased to writable variables. So we only
1586 check for this on non-specific stores.
1588 Note that if this is a specific store, i.e. associated with a
1589 modify_expr, then we can't suppress the V_DEF, lest we run into
1590 validation problems.
1592 This can happen when programs cast away const, leaving us with a
1593 store to read-only memory. If the statement is actually executed
1594 at runtime, then the program is ill formed. If the statement is
1595 not executed then all is well. At the very least, we cannot ICE. */
1596 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1598 gcc_assert (!is_real_op);
1599 flags &= ~(opf_is_def | opf_kill_def);
1602 if (is_real_op)
1604 /* The variable is a GIMPLE register. Add it to real operands. */
1605 if (flags & opf_is_def)
1606 append_def (var_p);
1607 else
1608 append_use (var_p);
1610 else
1612 varray_type aliases;
1614 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1615 virtual operands, unless the caller has specifically requested
1616 not to add virtual operands (used when adding operands inside an
1617 ADDR_EXPR expression). */
1618 if (flags & opf_no_vops)
1619 return;
1621 aliases = v_ann->may_aliases;
1623 if (aliases == NULL)
1625 /* The variable is not aliased or it is an alias tag. */
1626 if (flags & opf_is_def)
1628 if (flags & opf_kill_def)
1630 /* Only regular variables or struct fields may get a
1631 V_MUST_DEF operand. */
1632 gcc_assert (!MTAG_P (var)
1633 || TREE_CODE (var) == STRUCT_FIELD_TAG);
1634 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1635 variable definitions. */
1636 append_v_must_def (var);
1638 else
1640 /* Add a V_MAY_DEF for call-clobbered variables and
1641 memory tags. */
1642 append_v_may_def (var);
1645 else
1647 append_vuse (var);
1648 if (s_ann && v_ann->is_alias_tag)
1649 s_ann->makes_aliased_loads = 1;
1652 else
1654 size_t i;
1656 /* The variable is aliased. Add its aliases to the virtual
1657 operands. */
1658 gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);
1660 if (flags & opf_is_def)
1662 /* If the variable is also an alias tag, add a virtual
1663 operand for it, otherwise we will miss representing
1664 references to the members of the variable's alias set.
1665 This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
1666 if (v_ann->is_alias_tag)
1667 append_v_may_def (var);
1669 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1670 append_v_may_def (VARRAY_TREE (aliases, i));
1672 if (s_ann)
1673 s_ann->makes_aliased_stores = 1;
1675 else
1677 /* Similarly, append a virtual use for VAR itself, when
1678 it is an alias tag. */
1679 if (v_ann->is_alias_tag)
1680 append_vuse (var);
1682 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1683 append_vuse (VARRAY_TREE (aliases, i));
1685 if (s_ann)
1686 s_ann->makes_aliased_loads = 1;
1693 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
1694 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
1695 a single variable whose address has been taken or any other valid
1696 GIMPLE memory reference (structure reference, array, etc). If the
1697 base address of REF is a decl that has sub-variables, also add all
1698 of its sub-variables. */
1700 void
1701 add_to_addressable_set (tree ref, bitmap *addresses_taken)
1703 tree var;
1704 subvar_t svars;
1706 gcc_assert (addresses_taken);
1708 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
1709 as the only thing we take the address of. If VAR is a structure,
1710 taking the address of a field means that the whole structure may
1711 be referenced using pointer arithmetic. See PR 21407 and the
1712 ensuing mailing list discussion. */
1713 var = get_base_address (ref);
1714 if (var && SSA_VAR_P (var))
1716 if (*addresses_taken == NULL)
1717 *addresses_taken = BITMAP_GGC_ALLOC ();
1719 if (var_can_have_subvars (var)
1720 && (svars = get_subvars_for_var (var)))
1722 subvar_t sv;
1723 for (sv = svars; sv; sv = sv->next)
1725 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
1726 TREE_ADDRESSABLE (sv->var) = 1;
1729 else
1731 bitmap_set_bit (*addresses_taken, DECL_UID (var));
1732 TREE_ADDRESSABLE (var) = 1;
1738 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1739 clobbered variables in the function. */
1741 static void
1742 add_call_clobber_ops (tree stmt, tree callee)
1744 unsigned u;
1745 tree t;
1746 bitmap_iterator bi;
1747 stmt_ann_t s_ann = stmt_ann (stmt);
1748 struct stmt_ann_d empty_ann;
1749 bitmap not_read_b, not_written_b;
1751 /* Functions that are not const, pure, or noreturn may clobber
1752 call-clobbered variables. */
1753 if (s_ann)
1754 s_ann->makes_clobbering_call = true;
1756 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1757 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1758 if (global_var)
1760 add_stmt_operand (&global_var, s_ann, opf_is_def);
1761 return;
1764 /* FIXME - if we have better information from the static vars
1765 analysis, we need to make the cache call site specific. This way
1766 we can have the performance benefits even if we are doing good
1767 optimization. */
1769 /* Get info for local and module level statics. There is a bit
1770 set for each static if the call being processed does not read
1771 or write that variable. */
1773 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1774 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1776 /* If cache is valid, copy the elements into the build vectors. */
1777 if (ssa_call_clobbered_cache_valid
1778 && (!not_read_b || bitmap_empty_p (not_read_b))
1779 && (!not_written_b || bitmap_empty_p (not_written_b)))
1781 for (u = 0 ; u < VEC_length (tree, clobbered_vuses); u++)
1783 t = VEC_index (tree, clobbered_vuses, u);
1784 gcc_assert (TREE_CODE (t) != SSA_NAME);
1785 var_ann (t)->in_vuse_list = 1;
1786 VEC_safe_push (tree, heap, build_vuses, (tree)t);
1788 for (u = 0; u < VEC_length (tree, clobbered_v_may_defs); u++)
1790 t = VEC_index (tree, clobbered_v_may_defs, u);
1791 gcc_assert (TREE_CODE (t) != SSA_NAME);
1792 var_ann (t)->in_v_may_def_list = 1;
1793 VEC_safe_push (tree, heap, build_v_may_defs, (tree)t);
1795 if (s_ann)
1797 s_ann->makes_aliased_loads = clobbered_aliased_loads;
1798 s_ann->makes_aliased_stores = clobbered_aliased_stores;
1800 return;
1803 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
1805 /* Add a V_MAY_DEF operand for every call clobbered variable. */
1806 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1808 tree var = referenced_var (u);
1809 if (unmodifiable_var_p (var))
1810 add_stmt_operand (&var, &empty_ann, opf_none);
1811 else
1813 bool not_read
1814 = not_read_b ? bitmap_bit_p (not_read_b, u) : false;
1815 bool not_written
1816 = not_written_b ? bitmap_bit_p (not_written_b, u) : false;
1818 if (not_written)
1820 if (!not_read)
1821 add_stmt_operand (&var, &empty_ann, opf_none);
1823 else
1824 add_stmt_operand (&var, &empty_ann, opf_is_def);
1828 if ((!not_read_b || bitmap_empty_p (not_read_b))
1829 && (!not_written_b || bitmap_empty_p (not_written_b)))
1831 clobbered_aliased_loads = empty_ann.makes_aliased_loads;
1832 clobbered_aliased_stores = empty_ann.makes_aliased_stores;
1834 /* Set the flags for a stmt's annotation. */
1835 if (s_ann)
1837 s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
1838 s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
1841 /* Prepare empty cache vectors. */
1842 VEC_truncate (tree, clobbered_vuses, 0);
1843 VEC_truncate (tree, clobbered_v_may_defs, 0);
1845 /* Now fill the clobbered cache with the values that have been found. */
1846 for (u = 0; u < VEC_length (tree, build_vuses); u++)
1847 VEC_safe_push (tree, heap, clobbered_vuses,
1848 VEC_index (tree, build_vuses, u));
1850 gcc_assert (VEC_length (tree, build_vuses)
1851 == VEC_length (tree, clobbered_vuses));
1853 for (u = 0; u < VEC_length (tree, build_v_may_defs); u++)
1854 VEC_safe_push (tree, heap, clobbered_v_may_defs,
1855 VEC_index (tree, build_v_may_defs, u));
1857 gcc_assert (VEC_length (tree, build_v_may_defs)
1858 == VEC_length (tree, clobbered_v_may_defs));
1860 ssa_call_clobbered_cache_valid = true;
1865 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1866 function. */
1868 static void
1869 add_call_read_ops (tree stmt)
1871 unsigned u;
1872 tree t;
1873 bitmap_iterator bi;
1874 stmt_ann_t s_ann = stmt_ann (stmt);
1875 struct stmt_ann_d empty_ann;
1877 /* If the function is not pure, it may reference memory. Add
1878 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1879 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1880 if (global_var)
1882 add_stmt_operand (&global_var, s_ann, opf_none);
1883 return;
1886 /* If cache is valid, copy the elements into the build vector. */
1887 if (ssa_ro_call_cache_valid)
1889 for (u = 0; u < VEC_length (tree, ro_call_vuses); u++)
1891 t = VEC_index (tree, ro_call_vuses, u);
1892 gcc_assert (TREE_CODE (t) != SSA_NAME);
1893 var_ann (t)->in_vuse_list = 1;
1894 VEC_safe_push (tree, heap, build_vuses, (tree)t);
1896 if (s_ann)
1897 s_ann->makes_aliased_loads = ro_call_aliased_loads;
1898 return;
1901 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
1903 /* Add a VUSE for each call-clobbered variable. */
1904 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1906 tree var = referenced_var (u);
1907 add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
1910 ro_call_aliased_loads = empty_ann.makes_aliased_loads;
1911 if (s_ann)
1912 s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
1914 /* Prepare empty cache vectors. */
1915 VEC_truncate (tree, ro_call_vuses, 0);
1917 /* Now fill the read-only call cache with the values that have been found. */
1918 for (u = 0; u < VEC_length (tree, build_vuses); u++)
1919 VEC_safe_push (tree, heap, ro_call_vuses,
1920 VEC_index (tree, build_vuses, u));
1922 gcc_assert (VEC_length (tree, build_vuses)
1923 == VEC_length (tree, ro_call_vuses));
1925 ssa_ro_call_cache_valid = true;
1929 /* Scan the immediate_use list for VAR, making sure it is linked properly.
1930 Return TRUE if there is a problem. */
1932 bool
1933 verify_imm_links (FILE *f, tree var)
1935 use_operand_p ptr, prev, list;
1936 int count;
1938 gcc_assert (TREE_CODE (var) == SSA_NAME);
1940 list = &(SSA_NAME_IMM_USE_NODE (var));
1941 gcc_assert (list->use == NULL);
1943 if (list->prev == NULL)
1945 gcc_assert (list->next == NULL);
1946 return false;
1949 prev = list;
1950 count = 0;
1951 for (ptr = list->next; ptr != list; )
1953 if (prev != ptr->prev)
1954 goto error;
1956 if (ptr->use == NULL)
1957 goto error; /* 2 roots, or SAFE guard node. */
1958 else if (*(ptr->use) != var)
1959 goto error;
1961 prev = ptr;
1962 ptr = ptr->next;
1963 /* Avoid infinite loops. 50,000,000 uses probably indicates a problem. */
1964 if (count++ > 50000000)
1965 goto error;
1968 /* Verify list in the other direction. */
1969 prev = list;
1970 for (ptr = list->prev; ptr != list; )
1972 if (prev != ptr->next)
1973 goto error;
1974 prev = ptr;
1975 ptr = ptr->prev;
1976 if (count-- < 0)
1977 goto error;
1980 if (count != 0)
1981 goto error;
1983 return false;
1985 error:
1986 if (ptr->stmt && stmt_modified_p (ptr->stmt))
1988 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
1989 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
1991 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
1992 (void *)ptr->use);
1993 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
1994 fprintf(f, "\n");
1995 return true;
1999 /* Dump all the immediate uses of VAR to FILE. */
2001 void
2002 dump_immediate_uses_for (FILE *file, tree var)
2004 imm_use_iterator iter;
2005 use_operand_p use_p;
2007 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2009 print_generic_expr (file, var, TDF_SLIM);
2010 fprintf (file, " : -->");
2011 if (has_zero_uses (var))
2012 fprintf (file, " no uses.\n");
2013 else
2014 if (has_single_use (var))
2015 fprintf (file, " single use.\n");
2016 else
2017 fprintf (file, "%d uses.\n", num_imm_uses (var));
2019 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2021 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2022 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2023 else
2024 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2026 fprintf(file, "\n");
2029 /* Dump all the immediate uses to FILE. */
2031 void
2032 dump_immediate_uses (FILE *file)
2034 tree var;
2035 unsigned int x;
2037 fprintf (file, "Immediate_uses: \n\n");
2038 for (x = 1; x < num_ssa_names; x++)
2040 var = ssa_name(x);
2041 if (!var)
2042 continue;
2043 dump_immediate_uses_for (file, var);
2048 /* Dump def-use edges on stderr. */
2050 void
2051 debug_immediate_uses (void)
2053 dump_immediate_uses (stderr);
2056 /* Dump def-use edges on stderr. */
2058 void
2059 debug_immediate_uses_for (tree var)
2061 dump_immediate_uses_for (stderr, var);
2063 #include "gt-tree-ssa-operands.h"