/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   For example, if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified such
   that it contains 'a_5' rather than 'a'.  */
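
/* As a purely illustrative example (the exact virtual operands for any
   given statement depend on the results of alias analysis), a statement
   such as

     *p_1 = a_2 + b_3;

   where 'p' may point to the global 'x', would end up with a cache
   containing real USEs of 'a_2', 'b_3' and 'p_1', and a V_MAY_DEF of 'x'
   added through p's memory tag; the store goes through memory, so no
   real DEF is created.  */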

/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none        0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def      (1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def    (1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops     (1 << 2)

/* Operand is a "non-specific" kill for call-clobbers and such.  This
   is used to distinguish "reset the world" events from explicit
   MODIFY_EXPRs.  */
#define opf_non_specific  (1 << 3)
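
/* For instance (illustration only), when scanning the assignment
   'a = b + c' the LHS is scanned with 'opf_is_def | opf_kill_def' and
   the RHS with 'opf_none', while the traversal of an ADDR_EXPR such as
   '&a.b[i]' adds 'opf_no_vops' so that only the index 'i' produces an
   operand.  */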

/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Array for building all the V_MAY_DEF operands.  */
static VEC(tree,heap) *build_v_may_defs;

/* Array for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;

/* Array for building all the V_MUST_DEF operands.  */
static VEC(tree,heap) *build_v_must_defs;

static void get_expr_operands (tree, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Allocates operand OP of given TYPE from the appropriate free list,
   or from newly allocated memory if the free list is empty.  */

#define ALLOC_OPTYPE(OP, TYPE)                          \
  do                                                    \
    {                                                   \
      TYPE##_optype_p ret                               \
         = gimple_ssa_operands (cfun)->free_##TYPE##s;  \
      if (ret)                                          \
        gimple_ssa_operands (cfun)->free_##TYPE##s      \
            = ret->next;                                \
      else                                              \
        ret = ssa_operand_alloc (sizeof (*ret));        \
      (OP) = ret;                                       \
    } while (0)
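
/* For instance, 'ALLOC_OPTYPE (ptr, def)' pops a def_optype_d node off
   gimple_ssa_operands (cfun)->free_defs when one is available, and
   otherwise carves a fresh node out of the operand memory pool via
   ssa_operand_alloc.  */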

/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}


/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *)p);
  tree e2 = *((const tree *)q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}


/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */
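/* For instance, a build list holding the virtual operands for 'b'
   (DECL_UID 12) and 'a' (DECL_UID 7), in that order, comes out as
   'a', 'b'.  (The UIDs here are made up for illustration.)  */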

static inline void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);

  if (num < 2)
    return;

  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
          > get_name_decl (VEC_index (tree, list, 1)))
        {
          /* Swap elements if in the wrong order.  */
          tree tmp = VEC_index (tree, list, 0);
          VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
          VEC_replace (tree, list, 1, tmp);
        }
      return;
    }

  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
         VEC_length (tree, list),
         sizeof (tree),
         operand_build_cmp);
}


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}

/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobber_ops.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (V_MAY_DEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_ops.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;

/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuses = VEC_alloc (tree, heap, 25);
      build_v_may_defs = VEC_alloc (tree, heap, 25);
      build_v_must_defs = VEC_alloc (tree, heap, 25);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      VEC_free (tree, heap, build_v_must_defs);
      VEC_free (tree, heap, build_v_may_defs);
      VEC_free (tree, heap, build_vuses);
    }
  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;
  gimple_ssa_operands (cfun)->free_vuses = NULL;
  gimple_ssa_operands (cfun)->free_maydefs = NULL;
  gimple_ssa_operands (cfun)->free_mustdefs = NULL;
  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
        = gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars:%d\n",
               clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided:%d\n",
               clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided:%d\n",
               clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
               clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers:%d\n",
               clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
               clobber_stats.static_readonly_clobbers_avoided);
    }
}

/* Return memory for operands of SIZE chunks.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;
  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= SSA_OPERAND_MEMORY_SIZE)
    {
      struct ssa_operand_memory_d *ptr;
      ptr = GGC_NEW (struct ssa_operand_memory_d);
      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }
  ptr = &(gimple_ssa_operands (cfun)->operand_memory
          ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}

/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* fold_stmt may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}

/* Appends ELT after TO, and moves the TO pointer to ELT.  */

#define APPEND_OP_AFTER(ELT, TO)        \
  do                                    \
    {                                   \
      (TO)->next = (ELT);               \
      (TO) = (ELT);                     \
    } while (0)

/* Appends head of list FROM after TO, and move both pointers
   to their successors.  */

#define MOVE_HEAD_AFTER(FROM, TO)       \
  do                                    \
    {                                   \
      APPEND_OP_AFTER (FROM, TO);       \
      (FROM) = (FROM)->next;            \
    } while (0)

/* Moves OP to appropriate freelist.  OP is set to its successor.  */

#define MOVE_HEAD_TO_FREELIST(OP, TYPE)                         \
  do                                                            \
    {                                                           \
      TYPE##_optype_p next = (OP)->next;                        \
      (OP)->next                                                \
        = gimple_ssa_operands (cfun)->free_##TYPE##s;           \
      gimple_ssa_operands (cfun)->free_##TYPE##s = (OP);        \
      (OP) = next;                                              \
    } while (0)

/* Initializes immediate use at USE_PTR to value VAL, and links it to the list
   of immediate uses.  STMT is the current statement.  */

#define INITIALIZE_USE(USE_PTR, VAL, STMT)              \
  do                                                    \
    {                                                   \
      (USE_PTR)->use = (VAL);                           \
      link_imm_use_stmt ((USE_PTR), *(VAL), (STMT));    \
    } while (0)

/* Adds OP to the list of defs after LAST, and moves
   LAST to the new element.  */

static inline void
add_def_op (tree *op, def_optype_p *last)
{
  def_optype_p new;

  ALLOC_OPTYPE (new, def);
  DEF_OP_PTR (new) = op;
  APPEND_OP_AFTER (new, *last);
}


/* Adds OP to the list of uses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_use_op (tree stmt, tree *op, use_optype_p *last)
{
  use_optype_p new;

  ALLOC_OPTYPE (new, use);
  INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
  APPEND_OP_AFTER (new, *last);
}


/* Adds OP to the list of vuses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_vuse_op (tree stmt, tree op, vuse_optype_p *last)
{
  vuse_optype_p new;

  ALLOC_OPTYPE (new, vuse);
  VUSE_OP (new) = op;
  INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt);
  APPEND_OP_AFTER (new, *last);
}


/* Adds OP to the list of maydefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_maydef_op (tree stmt, tree op, maydef_optype_p *last)
{
  maydef_optype_p new;

  ALLOC_OPTYPE (new, maydef);
  MAYDEF_RESULT (new) = op;
  MAYDEF_OP (new) = op;
  INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt);
  APPEND_OP_AFTER (new, *last);
}


/* Adds OP to the list of mustdefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_mustdef_op (tree stmt, tree op, mustdef_optype_p *last)
{
  mustdef_optype_p new;

  ALLOC_OPTYPE (new, mustdef);
  MUSTDEF_RESULT (new) = op;
  MUSTDEF_KILL (new) = op;
  INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt);
  APPEND_OP_AFTER (new, *last);
}

/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Given that def operands list is not necessarily sorted, merging
           the operands this way does not make much sense.
        -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_def_ops (tree stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  tree *old_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = DEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_defs))
    {
      tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
      old_base = DEF_OP_PTR (old_ops);

      if (old_base == new_base)
        {
          /* if variables are the same, reuse this node.  */
          MOVE_HEAD_AFTER (old_ops, last);
          new_i++;
        }
      else if (old_base < new_base)
        {
          /* if old is less than new, old goes to the free list.  */
          MOVE_HEAD_TO_FREELIST (old_ops, def);
        }
      else
        {
          /* This is a new operand.  */
          add_def_op (new_base, &last);
          new_i++;
        }
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_defs); new_i++)
    add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* Now set the stmt's operands.  */
  DEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    def_optype_p ptr;
    unsigned x = 0;
    for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_defs));
  }
#endif
}

/* This routine will create stmt operands for STMT from the def build list.  */

static void
finalize_ssa_defs (tree stmt)
{
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);

  /* If there is an old list, often the new list is identical, or close, so
     find the elements at the beginning that are the same as the vector.  */
  finalize_ssa_def_ops (stmt);
  VEC_truncate (tree, build_defs, 0);
}

/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_use_ops (tree stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  new_list.next = NULL;
  last = &new_list;

  old_ops = USE_OPS (stmt);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);

  last->next = NULL;

  /* Now set the stmt's operands.  */
  USE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_uses));
  }
#endif
}

/* Finalize the USE operands for STMT from the use build vector.  */

static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to update_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
  }
#endif
  finalize_ssa_use_ops (stmt);
  VEC_truncate (tree, build_uses, 0);
}

/* Takes elements from build_v_may_defs and turns them into maydef operands of
   STMT.  */
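
/* For instance (illustration only), if the statement's existing MAYDEF
   list covers base variables with DECL_UIDs 5 and 9, and build_v_may_defs
   now contains the variables with DECL_UIDs 5, 7 and 9, the old nodes for
   5 and 9 are reused (keeping their SSA_NAMEs and immediate-use links)
   and a single new node is created for 7.  */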

static inline void
finalize_ssa_v_may_def_ops (tree stmt)
{
  unsigned new_i;
  struct maydef_optype_d new_list;
  maydef_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = MAYDEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_v_may_defs))
    {
      act = VEC_index (tree, build_v_may_defs, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (MAYDEF_OP (old_ops));

      if (old_base == new_base)
        {
          /* if variables are the same, reuse this node.  */
          MOVE_HEAD_AFTER (old_ops, last);
          set_virtual_use_link (MAYDEF_OP_PTR (last), stmt);
          new_i++;
        }
      else if (old_base < new_base)
        {
          /* if old is less than new, old goes to the free list.  */
          delink_imm_use (MAYDEF_OP_PTR (old_ops));
          MOVE_HEAD_TO_FREELIST (old_ops, maydef);
        }
      else
        {
          /* This is a new operand.  */
          add_maydef_op (stmt, act, &last);
          new_i++;
        }
    }

  /* If there is anything remaining in the build_v_may_defs list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++)
    add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (MAYDEF_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_maydefs;
      gimple_ssa_operands (cfun)->free_maydefs = old_ops;
    }

  /* Now set the stmt's operands.  */
  MAYDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_v_may_defs));
  }
#endif
}

static void
finalize_ssa_v_may_defs (tree stmt)
{
  finalize_ssa_v_may_def_ops (stmt);
}


/* Clear the in_list bits and empty the build array for V_MAY_DEFs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = VEC_length (tree, build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
        {
          var_ann_t ann = var_ann (t);
          ann->in_v_may_def_list = 0;
        }
    }
  VEC_truncate (tree, build_v_may_defs, 0);
}

/* Takes elements from build_vuses and turns them into vuse operands of
   STMT.  */

static inline void
finalize_ssa_vuse_ops (tree stmt)
{
  unsigned new_i;
  struct vuse_optype_d new_list;
  vuse_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = VUSE_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_vuses))
    {
      act = VEC_index (tree, build_vuses, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (VUSE_OP (old_ops));

      if (old_base == new_base)
        {
          /* if variables are the same, reuse this node.  */
          MOVE_HEAD_AFTER (old_ops, last);
          set_virtual_use_link (VUSE_OP_PTR (last), stmt);
          new_i++;
        }
      else if (old_base < new_base)
        {
          /* if old is less than new, old goes to the free list.  */
          delink_imm_use (USE_OP_PTR (old_ops));
          MOVE_HEAD_TO_FREELIST (old_ops, vuse);
        }
      else
        {
          /* This is a new operand.  */
          add_vuse_op (stmt, act, &last);
          new_i++;
        }
    }

  /* If there is anything remaining in the build_vuses list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
    add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (VUSE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_vuses;
      gimple_ssa_operands (cfun)->free_vuses = old_ops;
    }

  /* Now set the stmt's operands.  */
  VUSE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_vuses));
  }
#endif
}

/* Finalize the VUSE operands for STMT from the VUSE build vector.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_v_may_defs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is
     not needed because V_MAY_DEFs imply a VUSE of the variable.  For
     instance, suppose that variable 'a' is aliased:

              #   VUSE <a_2>
              #   a_3 = V_MAY_DEF <a_2>
              a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the
     V_MAY_DEF operation.  */
  num = VEC_length (tree, build_vuses);
  num_v_may_defs = VEC_length (tree, build_v_may_defs);

  if (num > 0 && num_v_may_defs > 0)
    {
      for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
        {
          tree vuse;
          vuse = VEC_index (tree, build_vuses, vuse_index);
          if (TREE_CODE (vuse) != SSA_NAME)
            {
              var_ann_t ann = var_ann (vuse);
              ann->in_vuse_list = 0;
              if (ann->in_v_may_def_list)
                {
                  VEC_ordered_remove (tree, build_vuses, vuse_index);
                  continue;
                }
            }
          vuse_index++;
        }
    }
  else
    {
      /* Clear out the in_list bits.  */
      for (vuse_index = 0;
           vuse_index < VEC_length (tree, build_vuses);
           vuse_index++)
        {
          tree t = VEC_index (tree, build_vuses, vuse_index);
          if (TREE_CODE (t) != SSA_NAME)
            {
              var_ann_t ann = var_ann (t);
              ann->in_vuse_list = 0;
            }
        }
    }

  finalize_ssa_vuse_ops (stmt);

  /* The V_MAY_DEF build vector wasn't cleaned up because we needed it.  */
  cleanup_v_may_defs ();

  /* Free the VUSEs build vector.  */
  VEC_truncate (tree, build_vuses, 0);
}

/* Takes elements from build_v_must_defs and turns them into mustdef operands of
   STMT.  */

static inline void
finalize_ssa_v_must_def_ops (tree stmt)
{
  unsigned new_i;
  struct mustdef_optype_d new_list;
  mustdef_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = MUSTDEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_v_must_defs))
    {
      act = VEC_index (tree, build_v_must_defs, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (MUSTDEF_KILL (old_ops));

      if (old_base == new_base)
        {
          /* If variables are the same, reuse this node.  */
          MOVE_HEAD_AFTER (old_ops, last);
          set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt);
          new_i++;
        }
      else if (old_base < new_base)
        {
          /* If old is less than new, old goes to the free list.  */
          delink_imm_use (MUSTDEF_KILL_PTR (old_ops));
          MOVE_HEAD_TO_FREELIST (old_ops, mustdef);
        }
      else
        {
          /* This is a new operand.  */
          add_mustdef_op (stmt, act, &last);
          new_i++;
        }
    }

  /* If there is anything remaining in the build_v_must_defs list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++)
    add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (MUSTDEF_KILL_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_mustdefs;
      gimple_ssa_operands (cfun)->free_mustdefs = old_ops;
    }

  /* Now set the stmt's operands.  */
  MUSTDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_v_must_defs));
  }
#endif
}

static void
finalize_ssa_v_must_defs (tree stmt)
{
  /* In the presence of subvars, there may be more than one V_MUST_DEF
     per statement (one for each subvar).  It is a bit expensive to
     verify that all must-defs in a statement belong to subvars if
     there is more than one must-def, so we don't do it.  Suffice to
     say, if you reach here without having subvars, and have num > 1,
     you have hit a bug.  */
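  /* E.g. (illustration only), an assignment to a whole structure
     'struct { int a; int b; } s' that has been split into the subvars
     SFT.a and SFT.b will carry two V_MUST_DEFs, one for each subvar.  */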
  finalize_ssa_v_must_def_ops (stmt);
  VEC_truncate (tree, build_v_must_defs, 0);
}

/* Finalize all the build vectors and attach the results to STMT.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);
}


/* Start the process of building up operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
  gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
}

/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree)def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree)use_p);
}


/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      /* Don't allow duplicate entries.  */
      if (ann->in_v_may_def_list)
        return;
      ann->in_v_may_def_list = 1;
    }

  VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
}


/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
        return;
      ann->in_vuse_list = 1;
    }

  VEC_safe_push (tree, heap, build_vuses, (tree)var);
}


/* Add VAR to the list of virtual must definitions.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
    if (var == VEC_index (tree, build_v_must_defs, i))
      return;

  VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
}

/* REF is a tree that contains the entire pointer dereference
   expression, if available, or NULL otherwise.  ALIAS is the variable
   we are asking if REF can access.  OFFSET and SIZE come from the
   memory access expression that generated this virtual operand.  */

static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
                           HOST_WIDE_INT size)
{
  bool offsetgtz = offset > 0;
  unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
  tree base = ref ? get_base_address (ref) : NULL;

  /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
     using a call-clobbered memory tag.  By definition, call-clobbered
     memory tags can always touch .GLOBAL_VAR.  */
  if (alias == gimple_global_var (cfun))
    return true;

  /* We cannot prune nonlocal aliases because they are not type
     specific.  */
  if (alias == gimple_nonlocal_all (cfun))
    return true;

  /* If ALIAS is an SFT, it can't be touched if the offset
     and size of the access is not overlapping with the SFT offset and
     size.  This is only true if we are accessing through a pointer
     to a type that is the same as SFT_PARENT_VAR.  Otherwise, we may
     be accessing through a pointer to some substruct of the
     structure, and if we try to prune there, we will have the wrong
     offset, and get the wrong answer.
     i.e., we can't prune without more work if we have something like

       struct gcc_target
       {
         struct asm_out
         {
           const char *byte_op;
           struct asm_int_op
           {
             const char *hi;
           } aligned_op;
         } asm_out;
       } targetm;

       foo = &targetm.asm_out.aligned_op;
       return foo->hi;

     SFT.1, which represents hi, will have SFT_OFFSET=32 because in
     terms of SFT_PARENT_VAR, that is where it is.
     However, the access through the foo pointer will be at offset 0.  */
  if (size != -1
      && TREE_CODE (alias) == STRUCT_FIELD_TAG
      && base
      && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
      && !overlap_subvar (offset, size, alias, NULL))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* Without strict aliasing, it is impossible for a component access
     through a pointer to touch a random variable, unless that
     variable *is* a structure or a pointer.

     That is, given p->c, and some random global variable b,
     there is no legal way that p->c could be an access to b.

     Without strict aliasing on, we consider it legal to do something
     like:

       struct foos { int l; };
       int foo;
       static struct foos *getfoo(void);
       int main (void)
       {
         struct foos *f = getfoo();
         f->l = 1;
         foo = 2;
         if (f->l == 1)
           abort();
         exit(0);
       }
       static struct foos *getfoo(void)
       { return (struct foos *)&foo; }

     (taken from 20000623-1.c)

     The docs also say/imply that access through union pointers
     is legal (but *not* if you take the address of the union member,
     i.e. the inverse), such that you can do

       typedef union {
         int d;
       } U;

       int rv;
       void breakme()
       {
         U *rv0;
         U *pretmp = (U*)&rv;
         rv0 = pretmp;
         rv0->d = 42;
       }

     To implement this, we just punt on accesses through union
     pointers entirely.  */
  else if (ref
           && flag_strict_aliasing
           && TREE_CODE (ref) != INDIRECT_REF
           && !MTAG_P (alias)
           && (TREE_CODE (base) != INDIRECT_REF
               || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
           && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
           && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
#if 0
           /* FIXME: PR tree-optimization/29680.  */
           && !var_ann (alias)->is_heapvar
#else
           && !POINTER_TYPE_P (TREE_TYPE (alias))
#endif
           /* When the struct has the may_alias attribute attached to it,
              we need to return true, so check the alias set.  */
           && get_alias_set (base))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* If the offset of the access is greater than the size of one of
     the possible aliases, it can't be touching that alias, because it
     would be past the end of the structure.  */
  else if (ref
           && flag_strict_aliasing
           && TREE_CODE (ref) != INDIRECT_REF
           && !MTAG_P (alias)
           && !POINTER_TYPE_P (TREE_TYPE (alias))
           && offsetgtz
           && DECL_SIZE (alias)
           && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
           && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  return true;
}

/* Add VAR to the virtual operands array.  FLAGS is as in
   get_expr_operands.  FULL_REF is a tree that contains the entire
   pointer dereference expression, if available, or NULL otherwise.
   OFFSET and SIZE come from the memory access expression that
   generated this virtual operand.  FOR_CLOBBER is true if this is
   adding a virtual operand for a call clobber.  */

static void
add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
                     tree full_ref, HOST_WIDE_INT offset,
                     HOST_WIDE_INT size, bool for_clobber)
{
  VEC(tree,gc) *aliases;
  tree sym;
  var_ann_t v_ann;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a V_MAY_DEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     check that this only happens on non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     modify_expr, then we can't suppress the V_MAY_DEF, lest we run
     into validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
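  /* A hypothetical illustration: given

       const int x = 0;
       int *p = (int *) &x;
       ...
       foo ();

     the call to foo may clobber whatever 'p' points to, so 'x' can be
     marked call-clobbered even though it is unmodifiable.  The
     non-specific V_MAY_DEF added for the call is turned into a VUSE
     here, while a specific store such as '*p = 1' is left alone, as
     described above.  */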
  if ((flags & opf_non_specific) && unmodifiable_var_p (var))
    flags &= ~(opf_is_def | opf_kill_def);

  /* The variable is not a GIMPLE register.  Add it (or its aliases) to
     virtual operands, unless the caller has specifically requested
     not to add virtual operands (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  aliases = v_ann->may_aliases;
  if (aliases == NULL)
    {
      /* The variable is not aliased or it is an alias tag.  */
      if (flags & opf_is_def)
        {
          if (flags & opf_kill_def)
            {
              /* V_MUST_DEF for non-aliased, non-GIMPLE register
                 variable definitions.  */
              gcc_assert (!MTAG_P (var)
                          || TREE_CODE (var) == STRUCT_FIELD_TAG);
              append_v_must_def (var);
            }
          else
            {
              /* Add a V_MAY_DEF for call-clobbered variables and
                 memory tags.  */
              append_v_may_def (var);
            }
        }
      else
        append_vuse (var);
    }
  else
    {
      unsigned i;
      tree al;

      /* The variable is aliased.  Add its aliases to the virtual
         operands.  */
      gcc_assert (VEC_length (tree, aliases) != 0);

      if (flags & opf_is_def)
        {
          bool none_added = true;

          for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
            {
              if (!access_can_touch_variable (full_ref, al, offset, size))
                continue;

              none_added = false;
              append_v_may_def (al);
            }

          /* If the variable is also an alias tag, add a virtual
             operand for it, otherwise we will miss representing
             references to the members of the variable's alias set.
             This fixes the bug in gcc.c-torture/execute/20020503-1.c.

             It is also necessary to add bare defs on clobbers for
             SMT's, so that bare SMT uses caused by pruning all the
             aliases will link up properly with calls.   In order to
             keep the number of these bare defs we add down to the
             minimum necessary, we keep track of which SMT's were used
             alone in statement vdefs or VUSEs.  */
          if (v_ann->is_aliased
              || none_added
              || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
                  && for_clobber
                  && SMT_USED_ALONE (var)))
            {
              /* Every bare SMT def we add should have SMT_USED_ALONE
                 set on it, or else we will get the wrong answer on
                 clobbers.  */
              if (none_added
                  && !updating_used_alone && gimple_aliases_computed_p (cfun)
                  && TREE_CODE (var) == SYMBOL_MEMORY_TAG)
                gcc_assert (SMT_USED_ALONE (var));

              append_v_may_def (var);
            }
        }
      else
        {
          bool none_added = true;
          for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
            {
              if (!access_can_touch_variable (full_ref, al, offset, size))
                continue;
              none_added = false;
              append_vuse (al);
            }

          /* Similarly, append a virtual use for VAR itself, when
             it is an alias tag.  */
          if (v_ann->is_aliased || none_added)
            append_vuse (var);
        }
    }
}

/* Add *VAR_P to the appropriate operand array for S_ANN.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  bool is_real_op;
  tree var, sym;
  var_ann_t v_ann;

  var = *var_p;
  gcc_assert (SSA_VAR_P (var));

  is_real_op = is_gimple_reg (var);

  /* If this is a real operand, the operand is either an SSA name or a
     decl.  Virtual operands may only be decls.  */
  gcc_assert (is_real_op || DECL_P (var));

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
        append_def (var_p);
      else
        append_use (var_p);
    }
  else
    add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
}

/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
      that got us here.

   FLAGS is as in get_expr_operands.

   FULL_REF contains the full pointer dereference expression, if we
      have it, or NULL otherwise.

   OFFSET and SIZE are the location of the access inside the
      dereferenced pointer, if known.

   RECURSE_ON_BASE should be set to true if we want to continue
      calling get_expr_operands on the base pointer, and false if
      something else will do it for us.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags,
                           tree full_ref,
                           HOST_WIDE_INT offset, HOST_WIDE_INT size,
                           bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
          && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
          && pi->name_mem_tag)
        {
          /* PTR has its own memory tag.  Use it.  */
          add_virtual_operand (pi->name_mem_tag, s_ann, flags,
                               full_ref, offset, size, false);
        }
      else
        {
          /* If PTR is not an SSA_NAME or it doesn't have a name
             tag, use its symbol memory tag.  */
          var_ann_t v_ann;

          /* If we are emitting debugging dumps, display a warning if
             PTR is an SSA_NAME with no flow-sensitive alias
             information.  That means that we may need to compute
             aliasing again.  */
          if (dump_file
              && TREE_CODE (ptr) == SSA_NAME
              && pi == NULL)
            {
              fprintf (dump_file,
                       "NOTE: no flow-sensitive alias info for ");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, " in ");
              print_generic_stmt (dump_file, stmt, dump_flags);
            }

          if (TREE_CODE (ptr) == SSA_NAME)
            ptr = SSA_NAME_VAR (ptr);
          v_ann = var_ann (ptr);

          if (v_ann->symbol_mem_tag)
            add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
                                 full_ref, offset, size, false);
        }
    }
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      /* If a constant is used as a pointer, we can't generate a real
         operand for it but we mark the statement volatile to prevent
         optimizations from messing things up.  */
      if (s_ann)
        s_ann->has_volatile_ops = true;
      return;
    }
  else
    {
      /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
      gcc_unreachable ();
    }

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr, opf_none);
}

/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag = TMR_TAG (expr), ref;
  HOST_WIDE_INT offset, size, maxsize;
  subvar_t svars, sv;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);

  /* MEM_REFs should never be killing.  */
  flags &= ~opf_kill_def;

  if (TMR_SYMBOL (expr))
    {
      stmt_ann_t ann = stmt_ann (stmt);
      add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
    }

  if (!tag)
    {
      /* Something weird, so ensure that we will be careful.  */
      stmt_ann (stmt)->has_volatile_ops = true;
      return;
    }

  if (DECL_P (tag))
    {
      get_expr_operands (stmt, &tag, flags);
      return;
    }

  ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
  gcc_assert (ref != NULL_TREE);
  svars = get_subvars_for_var (ref);
  for (sv = svars; sv; sv = sv->next)
    {
      bool exact;
      if (overlap_subvar (offset, maxsize, sv->var, &exact))
        {
          int subvar_flags = flags;
          if (!exact || size != maxsize)
            subvar_flags &= ~opf_kill_def;
          add_stmt_operand (&sv->var, s_ann, subvar_flags);
        }
    }
}

/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b, not_written_b;

  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_stmt_operand (&var, s_ann, opf_is_def);
      return;
    }

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */
  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
  /* Add a V_MAY_DEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var_lookup (u);
      unsigned int escape_mask = var_ann (var)->escape_mask;
      tree real_var = var;
      bool not_read;
      bool not_written;

      /* Not read and not written are computed on regular vars, not
         subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
        real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b,
                                            DECL_UID (real_var)) : false;
      not_written = not_written_b ? bitmap_bit_p (not_written_b,
                                                  DECL_UID (real_var)) : false;
      gcc_assert (!unmodifiable_var_p (var));

      clobber_stats.clobbered_vars++;

      /* See if this variable is really clobbered by this function.  */

      /* Trivial case: Things escaping only to pure/const are not
         clobbered by non-pure-const, and only read by pure/const.  */
      if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
        {
          tree call = get_call_expr_in (stmt);
          if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
            {
              add_stmt_operand (&var, s_ann, opf_none);
              clobber_stats.unescapable_clobbers_avoided++;
              continue;
            }
          else
            {
              clobber_stats.unescapable_clobbers_avoided++;
              continue;
            }
        }

      if (not_written)
        {
          clobber_stats.static_write_clobbers_avoided++;
          if (!not_read)
            add_stmt_operand (&var, s_ann, opf_none);
          else
            clobber_stats.static_read_clobbers_avoided++;
        }
      else
        add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
    }
}

/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_stmt_operand (&var, s_ann, opf_none);
      return;
    }

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var (u);
      tree real_var = var;
      bool not_read;

      clobber_stats.readonly_clobbers++;

      /* Not read and not written are computed on regular vars, not
         subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
        real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
                            : false;

      if (not_read)
        {
          clobber_stats.static_readonly_clobbers_avoided++;
          continue;
        }

      add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
    }
}

/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or V_USE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
  if (gimple_aliases_computed_p (cfun)
      && !bitmap_empty_p (gimple_call_clobbered_vars (cfun))
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
         A 'noreturn' function might, but since we don't return anyway
         there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
          && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
        add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
        add_call_read_ops (stmt, get_callee_fndecl (expr));
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}

/* Scan operands for the ASM_EXPR statement STMT.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                               &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t) && s_ann)
            add_to_addressable_set (t, &s_ann->addresses_taken);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t) && s_ann)
            add_to_addressable_set (t, &s_ann->addresses_taken);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
        unsigned i;
        bitmap_iterator bi;

        /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
           decided to group them).  */
        if (gimple_global_var (cfun))
          {
            tree var = gimple_global_var (cfun);
            add_stmt_operand (&var, s_ann, opf_is_def);
          }
        else
          EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
            {
              tree var = referenced_var (i);
              add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
            }

        /* Now clobber all addressables.  */
        EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
          {
            tree var = referenced_var (i);

            /* Subvars are explicitly represented in this list, so
               we don't need the original to be added to the clobber
               ops, but the original *will* be in this list because
               we keep the addressability of the original
               variable up-to-date so we don't screw up the rest of
               the backend.  */
            if (var_can_have_subvars (var)
                && get_subvars_for_var (var) != NULL)
              continue;

            add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
          }

        break;
      }
}

/* Scan operands for the assignment expression EXPR in statement STMT.  */

static void
get_modify_expr_operands (tree stmt, tree expr)
{
  /* First get operands from the RHS.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

  /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
     registers.  If the LHS is a store to memory, we will either need
     a preserving definition (V_MAY_DEF) or a killing definition
     (V_MUST_DEF).

     Preserving definitions are those that modify a part of an
     aggregate object for which no subvars have been computed (or the
     reference does not correspond exactly to one of them).  Stores
     through a pointer are also represented with V_MAY_DEF operators.

     The determination of whether to use a preserving or a killing
     definition is done while scanning the LHS of the assignment.  By
     default, assume that we will emit a V_MUST_DEF.  */
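  /* For instance (illustration only), an assignment that overwrites an
     entire non-aliased structure 's' gets a V_MUST_DEF of 's' (or of
     each of its subvars), whereas a store through a pointer, '*p_1 = x',
     gets V_MAY_DEFs of everything 'p' may point to.  */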
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def|opf_kill_def);
}

/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
         reference to it, but the fact that the statement takes its
         address will be of interest to some passes (e.g. alias
         resolution).  */
      add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);

      /* If the address is invariant, there may be no interesting
         variable references inside.  */
      if (is_gimple_min_invariant (expr))
        return;

      /* Otherwise, there may be variables referenced inside but there
         should be no VUSEs created, since the referenced objects are
         not really accessed.  The only operands that we should find
         here are ARRAY_REF indices which will always be real operands
         (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case STRUCT_FIELD_TAG:
    case SYMBOL_MEMORY_TAG:
    case NAME_MEMORY_TAG:
      add_stmt_operand (expr_p, s_ann, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      {
        subvar_t svars;

        /* Add the subvars for a variable, if it has subvars, to DEFS
           or USES.  Otherwise, add the variable itself.  Whether it
           goes to USES or DEFS depends on the operand flags.  */
        if (var_can_have_subvars (expr)
            && (svars = get_subvars_for_var (expr)))
          {
            subvar_t sv;
            for (sv = svars; sv; sv = sv->next)
              add_stmt_operand (&sv->var, s_ann, flags);
          }
        else
          add_stmt_operand (expr_p, s_ann, flags);

        return;
      }
1914 case MISALIGNED_INDIRECT_REF:
1915 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1916 /* fall through */
1918 case ALIGN_INDIRECT_REF:
1919 case INDIRECT_REF:
1920 get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
1921 return;
1923 case TARGET_MEM_REF:
1924 get_tmr_operands (stmt, expr, flags);
1925 return;
1927 case ARRAY_REF:
1928 case ARRAY_RANGE_REF:
1929 case COMPONENT_REF:
1930 case REALPART_EXPR:
1931 case IMAGPART_EXPR:
1933 tree ref;
1934 HOST_WIDE_INT offset, size, maxsize;
1935 bool none = true;
1937 /* This component reference becomes an access to all of the
1938 subvariables it can touch, if we can determine that, but
1939 *NOT* the real one. If we can't determine which fields we
1940 could touch, the recursion will eventually get to a
1941 variable and add *all* of its subvars, or whatever is the
1942 minimum correct subset. */
1943 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
1944 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
1946 subvar_t sv;
1947 subvar_t svars = get_subvars_for_var (ref);
1949 for (sv = svars; sv; sv = sv->next)
1951 bool exact;
1953 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1955 int subvar_flags = flags;
1956 none = false;
1957 if (!exact || size != maxsize)
1958 subvar_flags &= ~opf_kill_def;
1959 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1963 if (!none)
1964 flags |= opf_no_vops;
1966 else if (TREE_CODE (ref) == INDIRECT_REF)
1968 get_indirect_ref_operands (stmt, ref, flags, expr, offset,
1969 maxsize, false);
1970 flags |= opf_no_vops;
1973 /* Even if we found subvars above, we still need to see the
1974 immediate uses of d in s.a[d]; if s.a has a subvar, we would
1975 otherwise miss them. */
1976 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1977 flags & ~opf_kill_def);
1979 if (code == COMPONENT_REF)
1981 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1982 s_ann->has_volatile_ops = true;
1983 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1985 else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
1987 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1988 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1989 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1992 return;
1995 case WITH_SIZE_EXPR:
1996 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1997 and an rvalue reference to its second argument. */
1998 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1999 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2000 return;
2002 case CALL_EXPR:
2003 get_call_expr_operands (stmt, expr);
2004 return;
2006 case COND_EXPR:
2007 case VEC_COND_EXPR:
2008 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
2009 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
2010 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
2011 return;
2013 case MODIFY_EXPR:
2014 get_modify_expr_operands (stmt, expr);
2015 return;
2017 case CONSTRUCTOR:
2019 /* General aggregate CONSTRUCTORs have been decomposed, but they
2020 are still in use as the COMPLEX_EXPR equivalent for vectors. */
2021 constructor_elt *ce;
2022 unsigned HOST_WIDE_INT idx;
2024 for (idx = 0;
2025 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
2026 idx++)
2027 get_expr_operands (stmt, &ce->value, opf_none);
2029 return;
2032 case BIT_FIELD_REF:
2033 /* Stores using BIT_FIELD_REF are always preserving definitions. */
2034 flags &= ~opf_kill_def;
2036 /* Fallthru */
2038 case TRUTH_NOT_EXPR:
2039 case VIEW_CONVERT_EXPR:
2040 do_unary:
2041 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2042 return;
2044 case TRUTH_AND_EXPR:
2045 case TRUTH_OR_EXPR:
2046 case TRUTH_XOR_EXPR:
2047 case COMPOUND_EXPR:
2048 case OBJ_TYPE_REF:
2049 case ASSERT_EXPR:
2050 do_binary:
2052 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2053 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2054 return;
2057 case DOT_PROD_EXPR:
2058 case REALIGN_LOAD_EXPR:
2060 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2061 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2062 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
2063 return;
2066 case BLOCK:
2067 case FUNCTION_DECL:
2068 case EXC_PTR_EXPR:
2069 case FILTER_EXPR:
2070 case LABEL_DECL:
2071 case CONST_DECL:
2072 case OMP_PARALLEL:
2073 case OMP_SECTIONS:
2074 case OMP_FOR:
2075 case OMP_SINGLE:
2076 case OMP_MASTER:
2077 case OMP_ORDERED:
2078 case OMP_CRITICAL:
2079 case OMP_RETURN:
2080 case OMP_CONTINUE:
2081 /* Expressions that make no memory references. */
2082 return;
2084 default:
2085 if (class == tcc_unary)
2086 goto do_unary;
2087 if (class == tcc_binary || class == tcc_comparison)
2088 goto do_binary;
2089 if (class == tcc_constant || class == tcc_type)
2090 return;
2093 /* If we get here, something has gone wrong. */
2094 #ifdef ENABLE_CHECKING
2095 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
2096 debug_tree (expr);
2097 fputs ("\n", stderr);
2098 #endif
2099 gcc_unreachable ();
2103 /* Parse STMT looking for operands. When finished, the various
2104 build_* operand vectors will have potential operands in them. */
2106 static void
2107 parse_ssa_operands (tree stmt)
2109 enum tree_code code;
2111 code = TREE_CODE (stmt);
2112 switch (code)
2114 case MODIFY_EXPR:
2115 get_modify_expr_operands (stmt, stmt);
2116 break;
2118 case COND_EXPR:
2119 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
2120 break;
2122 case SWITCH_EXPR:
2123 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
2124 break;
2126 case ASM_EXPR:
2127 get_asm_expr_operands (stmt);
2128 break;
2130 case RETURN_EXPR:
2131 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
2132 break;
2134 case GOTO_EXPR:
2135 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
2136 break;
2138 case LABEL_EXPR:
2139 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
2140 break;
2142 case BIND_EXPR:
2143 case CASE_LABEL_EXPR:
2144 case TRY_CATCH_EXPR:
2145 case TRY_FINALLY_EXPR:
2146 case EH_FILTER_EXPR:
2147 case CATCH_EXPR:
2148 case RESX_EXPR:
2149 /* These nodes contain no variable references. */
2150 break;
2152 default:
2153 /* Notice that if get_expr_operands tries to use &STMT as the
2154 operand pointer (which may only happen for USE operands), we
2155 will fail in add_stmt_operand. This default will handle
2156 statements like empty statements, or CALL_EXPRs that may
2157 appear on the RHS of a statement or as statements themselves. */
2158 get_expr_operands (stmt, &stmt, opf_none);
2159 break;
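/* A hedged illustration of the dispatch above (hypothetical GIMPLE and
   SSA names): for the statement

     a_1 = b_2 + *p_3;

   parse_ssa_operands routes the MODIFY_EXPR to get_modify_expr_operands,
   which leaves 'a_1' in build_defs, 'b_2' and 'p_3' in build_uses, and a
   VUSE of whatever memory tag is associated with '*p_3' in build_vuses.  */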
2164 /* Create an operands cache for STMT. */
2166 static void
2167 build_ssa_operands (tree stmt)
2169 stmt_ann_t ann = get_stmt_ann (stmt);
2171 /* Initially assume that the statement has no volatile operands. */
2172 if (ann)
2173 ann->has_volatile_ops = false;
2175 start_ssa_stmt_operands ();
2177 parse_ssa_operands (stmt);
2178 operand_build_sort_virtual (build_vuses);
2179 operand_build_sort_virtual (build_v_may_defs);
2180 operand_build_sort_virtual (build_v_must_defs);
2182 finalize_ssa_stmt_operands (stmt);
2186 /* Free any operand vectors in OPS. */
2188 void
2189 free_ssa_operands (stmt_operands_p ops)
2191 ops->def_ops = NULL;
2192 ops->use_ops = NULL;
2193 ops->maydef_ops = NULL;
2194 ops->mustdef_ops = NULL;
2195 ops->vuse_ops = NULL;
2199 /* Get the operands of statement STMT. */
2201 void
2202 update_stmt_operands (tree stmt)
2204 stmt_ann_t ann = get_stmt_ann (stmt);
2206 /* If update_stmt_operands is called before SSA is initialized, do
2207 nothing. */
2208 if (!ssa_operands_active ())
2209 return;
2211 /* The optimizers cannot handle statements that are nothing but a
2212 _DECL. This indicates a bug in the gimplifier. */
2213 gcc_assert (!SSA_VAR_P (stmt));
2215 gcc_assert (ann->modified);
2217 timevar_push (TV_TREE_OPS);
2219 build_ssa_operands (stmt);
2221 /* Clear the modified bit for STMT. */
2222 ann->modified = 0;
2224 timevar_pop (TV_TREE_OPS);
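/* A hedged usage sketch for update_stmt_operands, kept out of the build
   with #if 0.  STMT and NEW_RHS are hypothetical, and the helper names
   reflect our understanding of this API rather than a guarantee: a pass
   that edits a statement in place is expected to mark it modified before
   the operand cache is rebuilt (passes typically call update_stmt, which
   combines both steps).  */
#if 0
  TREE_OPERAND (stmt, 1) = new_rhs;	/* Rewrite the RHS in place.  */
  mark_stmt_modified (stmt);		/* Set the stmt's modified bit.  */
  update_stmt_operands (stmt);		/* Rebuild the operand cache.  */
#endif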
2228 /* Copy the virtual operands from SRC to DEST. */
2230 void
2231 copy_virtual_operands (tree dest, tree src)
2233 tree t;
2234 ssa_op_iter iter, old_iter;
2235 use_operand_p use_p, u2;
2236 def_operand_p def_p, d2;
2238 build_ssa_operands (dest);
2240 /* Copy all the virtual fields. */
2241 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
2242 append_vuse (t);
2243 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
2244 append_v_may_def (t);
2245 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
2246 append_v_must_def (t);
2248 if (VEC_length (tree, build_vuses) == 0
2249 && VEC_length (tree, build_v_may_defs) == 0
2250 && VEC_length (tree, build_v_must_defs) == 0)
2251 return;
2253 /* Now commit the virtual operands to this stmt. */
2254 finalize_ssa_v_must_defs (dest);
2255 finalize_ssa_v_may_defs (dest);
2256 finalize_ssa_vuses (dest);
2258 /* Finally, set the fields to the same values as the originals. */
2259 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
2260 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
2262 gcc_assert (!op_iter_done (&old_iter));
2263 SET_USE (use_p, t);
2264 t = op_iter_next_tree (&old_iter);
2266 gcc_assert (op_iter_done (&old_iter));
2268 op_iter_init_maydef (&old_iter, src, &u2, &d2);
2269 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
2271 gcc_assert (!op_iter_done (&old_iter));
2272 SET_USE (use_p, USE_FROM_PTR (u2));
2273 SET_DEF (def_p, DEF_FROM_PTR (d2));
2274 op_iter_next_maymustdef (&u2, &d2, &old_iter);
2276 gcc_assert (op_iter_done (&old_iter));
2278 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
2279 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
2281 gcc_assert (!op_iter_done (&old_iter));
2282 SET_USE (use_p, USE_FROM_PTR (u2));
2283 SET_DEF (def_p, DEF_FROM_PTR (d2));
2284 op_iter_next_maymustdef (&u2, &d2, &old_iter);
2286 gcc_assert (op_iter_done (&old_iter));
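/* A hedged usage sketch for the function above (illustrative only, kept
   out of the build with #if 0; OLD_STMT and NEW_STMT are hypothetical).
   When a pass replaces a statement with an equivalent one touching the
   same memory, copying the virtual operands keeps the existing
   VUSE/V_MAY_DEF/V_MUST_DEF SSA names on the new statement and avoids
   having to rename virtual operands afterwards.  */
#if 0
  copy_virtual_operands (new_stmt, old_stmt);
#endif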
2291 /* Specifically for use in DOM's expression analysis. Given a store, we
2292 create an artificial stmt which looks like a load from the store; this
2293 can be used to eliminate redundant loads. OLD_STMT is the store stmt,
2294 and NEW_STMT is the new load, which represents a load of the values
2295 stored. */
2297 void
2298 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
2300 stmt_ann_t ann;
2301 tree op;
2302 ssa_op_iter iter;
2303 use_operand_p use_p;
2304 unsigned x;
2306 ann = get_stmt_ann (new_stmt);
2308 /* Process the stmt looking for operands. */
2309 start_ssa_stmt_operands ();
2310 parse_ssa_operands (new_stmt);
2312 for (x = 0; x < VEC_length (tree, build_vuses); x++)
2314 tree t = VEC_index (tree, build_vuses, x);
2315 if (TREE_CODE (t) != SSA_NAME)
2317 var_ann_t ann = var_ann (t);
2318 ann->in_vuse_list = 0;
2322 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
2324 tree t = VEC_index (tree, build_v_may_defs, x);
2325 if (TREE_CODE (t) != SSA_NAME)
2327 var_ann_t ann = var_ann (t);
2328 ann->in_v_may_def_list = 0;
2332 /* Remove any virtual operands that were found. */
2333 VEC_truncate (tree, build_v_may_defs, 0);
2334 VEC_truncate (tree, build_v_must_defs, 0);
2335 VEC_truncate (tree, build_vuses, 0);
2337 /* For each VDEF on the original statement, we want to create a
2338 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
2339 statement. */
2340 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
2341 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
2342 append_vuse (op);
2344 /* Now build the operands for this new stmt. */
2345 finalize_ssa_stmt_operands (new_stmt);
2347 /* None of the uses in this fake stmt may be linked into the immediate use lists. */
2348 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
2349 delink_imm_use (use_p);
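/* A hedged illustration for the function above (hypothetical GIMPLE and
   SSA names): given the store

     s.a = x_3;          <-- old_stmt, with a virtual definition SFT.1_7

   DOM can build the artificial statement

     tmp_9 = s.a;        <-- new_stmt

   and this function gives the fake load a VUSE of SFT.1_7, the name
   defined by the store.  Looking the fake load up in the available
   expression table then lets later loads of s.a be replaced by x_3.  */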
2353 /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is made
2354 to test the validity of the swap operation. */
2356 void
2357 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
2359 tree op0, op1;
2360 op0 = *exp0;
2361 op1 = *exp1;
2363 /* If the operand cache is active, attempt to preserve the relative
2364 positions of these two operands in their respective immediate use
2365 lists. */
2366 if (ssa_operands_active () && op0 != op1)
2368 use_optype_p use0, use1, ptr;
2369 use0 = use1 = NULL;
2371 /* Find the 2 operands in the cache, if they are there. */
2372 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2373 if (USE_OP_PTR (ptr)->use == exp0)
2375 use0 = ptr;
2376 break;
2379 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2380 if (USE_OP_PTR (ptr)->use == exp1)
2382 use1 = ptr;
2383 break;
2386 /* Unless both uses have operand entries, there isn't much we can do
2387 at this point. Presumably we don't need to worry about it. */
2388 if (use0 && use1)
2390 tree *tmp = USE_OP_PTR (use1)->use;
2391 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
2392 USE_OP_PTR (use0)->use = tmp;
2396 /* Now swap the data. */
2397 *exp0 = op1;
2398 *exp1 = op0;
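/* A hedged usage sketch for the function above (illustrative only, kept
   out of the build with #if 0; RHS is a hypothetical binary expression).
   Canonicalizing a commutative RHS through this routine keeps the
   immediate-use lists consistent, whereas swapping the trees directly
   would leave each use operand linked on the wrong SSA name's
   immediate-use list.  */
#if 0
  swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0), &TREE_OPERAND (rhs, 1));
#endif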
2402 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
2403 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
2404 a single variable whose address has been taken or any other valid
2405 GIMPLE memory reference (structure reference, array, etc.). If the
2406 base address of REF is a decl that has sub-variables, also add all
2407 of its sub-variables. */
2409 void
2410 add_to_addressable_set (tree ref, bitmap *addresses_taken)
2412 tree var;
2413 subvar_t svars;
2415 gcc_assert (addresses_taken);
2417 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
2418 as the only thing we take the address of. If VAR is a structure,
2419 taking the address of a field means that the whole structure may
2420 be referenced using pointer arithmetic. See PR 21407 and the
2421 ensuing mailing list discussion. */
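   /* For instance (hypothetical code): after 'p = &s.b', the whole of
      's' (and all of its subvars) must be marked addressable, not just
      the field 'b', since pointer arithmetic on 'p' may be used to
      reach other fields of 's'.  */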
2422 var = get_base_address (ref);
2423 if (var && SSA_VAR_P (var))
2425 if (*addresses_taken == NULL)
2426 *addresses_taken = BITMAP_GGC_ALLOC ();
2428 if (var_can_have_subvars (var)
2429 && (svars = get_subvars_for_var (var)))
2431 subvar_t sv;
2432 for (sv = svars; sv; sv = sv->next)
2434 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
2435 TREE_ADDRESSABLE (sv->var) = 1;
2438 else
2440 bitmap_set_bit (*addresses_taken, DECL_UID (var));
2441 TREE_ADDRESSABLE (var) = 1;
2447 /* Scan the immediate_use list for VAR, making sure it is linked properly.
2448 Return TRUE if there is a problem and emit an error message to F. */
2450 bool
2451 verify_imm_links (FILE *f, tree var)
2453 use_operand_p ptr, prev, list;
2454 int count;
2456 gcc_assert (TREE_CODE (var) == SSA_NAME);
2458 list = &(SSA_NAME_IMM_USE_NODE (var));
2459 gcc_assert (list->use == NULL);
2461 if (list->prev == NULL)
2463 gcc_assert (list->next == NULL);
2464 return false;
2467 prev = list;
2468 count = 0;
2469 for (ptr = list->next; ptr != list; )
2471 if (prev != ptr->prev)
2472 goto error;
2474 if (ptr->use == NULL)
2475 goto error; /* 2 roots, or SAFE guard node. */
2476 else if (*(ptr->use) != var)
2477 goto error;
2479 prev = ptr;
2480 ptr = ptr->next;
2482 /* Avoid infinite loops. 50,000,000 uses probably indicates a
2483 problem. */
2484 if (count++ > 50000000)
2485 goto error;
2488 /* Verify list in the other direction. */
2489 prev = list;
2490 for (ptr = list->prev; ptr != list; )
2492 if (prev != ptr->next)
2493 goto error;
2494 prev = ptr;
2495 ptr = ptr->prev;
2496 if (count-- < 0)
2497 goto error;
2500 if (count != 0)
2501 goto error;
2503 return false;
2505 error:
2506 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2508 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2509 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2511 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2512 (void *)ptr->use);
2513 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2514 fprintf (f, "\n");
2515 return true;
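/* A hedged note on the structure checked above: the immediate uses of an
   SSA_NAME form a circular doubly-linked list whose head is the use node
   embedded in the SSA_NAME itself.  The head's 'use' field is NULL, which
   is how the sentinel is recognized; every other node lives in some
   statement's operand cache and must point back at VAR.  */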
2519 /* Dump all the immediate uses of VAR to FILE. */
2521 void
2522 dump_immediate_uses_for (FILE *file, tree var)
2524 imm_use_iterator iter;
2525 use_operand_p use_p;
2527 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2529 print_generic_expr (file, var, TDF_SLIM);
2530 fprintf (file, " : -->");
2531 if (has_zero_uses (var))
2532 fprintf (file, " no uses.\n");
2533 else
2534 if (has_single_use (var))
2535 fprintf (file, " single use.\n");
2536 else
2537 fprintf (file, "%d uses.\n", num_imm_uses (var));
2539 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2541 if (use_p->stmt == NULL && use_p->use == NULL)
2542 fprintf (file, "***end of stmt iterator marker***\n");
2543 else
2544 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2545 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2546 else
2547 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2549 fprintf (file, "\n");
2553 /* Dump all the immediate uses to FILE. */
2555 void
2556 dump_immediate_uses (FILE *file)
2558 tree var;
2559 unsigned int x;
2561 fprintf (file, "Immediate_uses: \n\n");
2562 for (x = 1; x < num_ssa_names; x++)
2564 var = ssa_name(x);
2565 if (!var)
2566 continue;
2567 dump_immediate_uses_for (file, var);
2572 /* Dump def-use edges on stderr. */
2574 void
2575 debug_immediate_uses (void)
2577 dump_immediate_uses (stderr);
2581 /* Dump def-use edges of VAR on stderr. */
2583 void
2584 debug_immediate_uses_for (tree var)
2586 dump_immediate_uses_for (stderr, var);