/* SSA operands management for trees.
   Copyright (C) 2003-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stmt.h"
#include "print-tree.h"
#include "flags.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-inline.h"
#include "timevar.h"
#include "dumpfile.h"
#include "ggc.h"
#include "langhooks.h"
#include "diagnostic-core.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   I.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
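
/* As a concrete illustration (an informal sketch, not the output of any
   tool here), consider the GIMPLE assignment

       a.x = b_2 + c_3;

   Parsing fills the build vectors roughly as follows: 'b_2' and 'c_3'
   become real USE operands (pointers to their slots in the stmt), while
   the store through 'a.x' is not a GIMPLE register, so it is represented
   by a VDEF (and matching VUSE) of the single artificial virtual operand
   .MEM created by create_vop_var below.  */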
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use 0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def (1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit (1 << 2)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)
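
/* The flags are OR'ed together as an expression is descended.  For
   instance, when get_expr_operands below reaches an ADDR_EXPR it
   effectively recurses with

     get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
                        flags | opf_no_vops | opf_not_non_addressable);

   so that only real operands such as array indices are collected
   underneath the address-of.  */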
/* Array for building all the use operands.  */
static vec<tree> build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our datastructures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;
/* Accessor to tree-ssa-operands.c caches.  */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}
/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (struct function *fun)
{
  if (fun == NULL)
    return false;

  return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
}
/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operands FUD chain.  */

static void
create_vop_var (struct function *fn)
{
  tree global_var;

  gcc_assert (fn->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
                           get_identifier (".MEM"),
                           void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;
  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;

  fn->gimple_df->vop = global_var;
}
/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operand space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT 0
#define OP_SIZE_1 (1024 - sizeof (void *))
#define OP_SIZE_2 (1024 * 4 - sizeof (void *))
#define OP_SIZE_3 (1024 * 16 - sizeof (void *))
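
/* A quick sanity check of the comment above, assuming a 64-bit host
   where a use_optype_d is five pointers (a next link plus the four
   pointer-sized fields of the embedded ssa_use_operand_t), i.e. about
   40 bytes:

     (1024 - 8) / 40  =  25 use operands per 1k chunk

   The exact counts vary with the host ABI; these sizes only tune the
   allocation pattern and do not affect correctness.  */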
/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  if (!n_initialized++)
    {
      build_uses.create (10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  gimple_ssa_operands (fn)->operand_memory_index
    = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}
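
/* init_ssa_operands and fini_ssa_operands below are reference-counted
   through n_initialized, so per-function setup nests safely.  An
   informal sketch of the lifetime:

     init_ssa_operands (fn1);    n_initialized: 0 -> 1, shared state built
     init_ssa_operands (fn2);    n_initialized: 1 -> 2, shared state reused
     ...
     fini_ssa_operands ();       n_initialized: 2 -> 1, shared state kept
     fini_ssa_operands ();       n_initialized: 1 -> 0, build_uses released  */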
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      build_uses.release ();
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
        = gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  cfun->gimple_df->vop = NULL_TREE;
}
/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d));

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
        {
        case OP_SIZE_INIT:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
          break;
        case OP_SIZE_1:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
          break;
        case OP_SIZE_2:
        case OP_SIZE_3:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
          break;
        default:
          gcc_unreachable ();
        }

      ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *)
                        + gimple_ssa_operands (cfun)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
          ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}
/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
        = gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
          ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}
/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use ();
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs vec of tree *.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
          && TREE_CODE (oldvdef) == SSA_NAME)
        oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
        gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (gimple_vdef (stmt));
        }
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      cfun->gimple_df->rename_vops = 1;
      cfun->gimple_df->ssa_renaming_needed = 1;
    }
}
/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses vec of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
          && TREE_CODE (oldvuse) == SSA_NAME)
        oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
                      ? build_vuse : build_vdef))
        gimple_set_vuse (stmt, NULL_TREE);
      build_uses.safe_insert (0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (cfun));
      cfun->gimple_df->rename_vops = 1;
      cfun->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = (tree *) build_uses[new_i];
      last = add_use_op (stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}
/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  build_uses.truncate (0);
}
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}
/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (build_uses.length () == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}
/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  build_uses.safe_push ((tree) use_p);
}
/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  if (!optimize)
    return;

  gcc_assert ((build_vdef == NULL_TREE
               || build_vdef == var)
              && (build_vuse == NULL_TREE
                  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}
/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  if (!optimize)
    return;

  gcc_assert (build_vuse == NULL_TREE
              || build_vuse == var);

  build_vuse = var;
}
/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (cfun));
  else
    append_vuse (gimple_vop (cfun));
}
/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var = *var_p;

  gcc_assert (SSA_VAR_P (*var_p));

  if (is_gimple_reg (var))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.
	 Real DEFs are tracked directly in the stmt, so only USEs are
	 appended here.  */
      if (flags & opf_def)
        ;
      else
        append_use (var_p);
      if (DECL_P (*var_p))
        cfun->gimple_df->ssa_renaming_needed = 1;
    }
  else
    {
      /* Mark statements with volatile operands.  */
      if (!(flags & opf_no_vops)
          && TREE_THIS_VOLATILE (var))
        gimple_set_has_volatile_ops (stmt, true);

      /* The variable is a memory access.  Add virtual operands.  */
      add_virtual_operand (stmt, flags);
    }
}
/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
        TREE_ADDRESSABLE (var) = 1;
      else if (TREE_CODE (var) == MEM_REF
               && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
               && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
        TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}
/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
   that got us here.

   FLAGS is as in get_expr_operands.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr,
                     opf_non_addressable | opf_use
                     | (flags & (opf_no_vops|opf_not_non_addressable)));
}
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  add_virtual_operand (stmt, flags);
}
/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
         A 'noreturn' function might, but since we don't return anyway
         there is no point in recording that.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
        add_virtual_operand (stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
        add_virtual_operand (stmt, opf_use);
    }
}
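
/* To summarize the cases above (an informal sketch, not exhaustive):

     foo (x);              ordinary call         VDEF (and VUSE) of .MEM
     y = pure_fn (x);      ECF_PURE              VUSE of .MEM only
     y = const_fn (x);     ECF_CONST             no virtual operands
     (ECF_NOVOPS)          internal calls        no virtual operands at all  */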
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                               &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link),
                         opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
                              &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (stmt, opf_def);
}
/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
         reference to it, but the fact that the statement takes its
         address will be of interest to some passes (e.g. alias
         resolution).  */
      if ((!(flags & opf_non_addressable)
           || (flags & opf_not_non_addressable))
          && !is_gimple_debug (stmt))
        mark_address_taken (TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
         variable references inside.  */
      if (is_gimple_min_invariant (expr))
        return;

      /* Otherwise, there may be variables referenced inside but there
         should be no VUSEs created, since the referenced objects are
         not really accessed.  The only operands that we should find
         here are ARRAY_REF indices which will always be real operands
         (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
                         flags | opf_not_non_addressable);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        if (!(flags & opf_no_vops)
            && TREE_THIS_VOLATILE (expr))
          gimple_set_has_volatile_ops (stmt, true);

        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

        if (code == COMPONENT_REF)
          {
            if (!(flags & opf_no_vops)
                && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
              gimple_set_has_volatile_ops (stmt, true);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
          }
        else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
          {
            get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
          }

        return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
         and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
        /* General aggregate CONSTRUCTORs have been decomposed, but they
           are still in use as the COMPLEX_EXPR equivalent for vectors.  */
        constructor_elt *ce;
        unsigned HOST_WIDE_INT idx;

        /* A volatile constructor is actually TREE_CLOBBER_P, transfer
           the volatility to the statement, don't use TREE_CLOBBER_P for
           mirroring the other uses of THIS_VOLATILE in this file.  */
        if (!(flags & opf_no_vops)
            && TREE_THIS_VOLATILE (expr))
          gimple_set_has_volatile_ops (stmt, true);

        for (idx = 0;
             vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
             idx++)
          get_expr_operands (stmt, &ce->value, uflags);

        return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
          && TREE_THIS_VOLATILE (expr))
        gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case FMA_EXPR:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
        return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
        goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
        goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
        return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
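
/* A short worked example (informal): for the statement

       x_1 = a[i_4];

   the RHS is an ARRAY_REF, so get_expr_operands recurses into the base
   'a' (a non-register, ending up in add_stmt_operand which adds a VUSE
   of .MEM) and into the index 'i_4', which is an SSA_NAME and becomes a
   real USE; the LHS 'x_1' is handled separately with opf_def by
   parse_ssa_operands below.  */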
/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_expr_operands (stmt);
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt)
          && gimple_debug_bind_has_value_p (stmt))
        get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
                           opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      append_vuse (gimple_vop (cfun));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (stmt);
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
        get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}
/* Create an operands cache for STMT.  */

static void
build_ssa_operands (gimple stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  finalize_ssa_stmt_operands (stmt);
}
/* Verifies SSA statement operands.  */

DEBUG_FUNCTION bool
verify_ssa_operands (gimple stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree use, def;
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);

  /* Now verify the built operands are the same as present in STMT.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
          || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
          || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      FOR_EACH_VEC_ELT (build_uses, i, use)
        {
          if (use_p->use == (tree *)use)
            {
              build_uses[i] = NULL_TREE;
              break;
            }
        }
      if (i == build_uses.length ())
        {
          error ("excess use operand for stmt");
          debug_generic_expr (USE_FROM_PTR (use_p));
          return true;
        }
    }
  FOR_EACH_VEC_ELT (build_uses, i, use)
    if (use != NULL_TREE)
      {
        error ("use operand missing for stmt");
        debug_generic_expr (*(tree *)use);
        return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}
/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (gimple stmt)
{
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
        delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}
/* Get the operands of statement STMT.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active (cfun))
    return;

  timevar_push (TV_TREE_OPS);

  /* If the stmt is a noreturn call queue it to be processed by
     split_bbs_on_noreturn_calls during cfg cleanup.  */
  if (is_gimple_call (stmt)
      && gimple_call_noreturn_p (stmt))
    vec_safe_push (MODIFIED_NORETURN_CALLS (cfun), stmt);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
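
/* Passes normally do not call update_stmt_operands directly.  A typical
   sequence (a sketch; update_stmt is declared alongside the gimple-ssa
   accessors) is:

     gimple_assign_set_rhs1 (stmt, new_rhs);   -- edit the stmt in place
     update_stmt (stmt);                       -- mark it modified and
                                                  rebuild the operand cache

   which ends up here once the SSA operand cache is active.  */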
/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is done
   to test the validity of the swap operation.  */

void
swap_ssa_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  gcc_checking_assert (ssa_operands_active (cfun));

  if (op0 != op1)
    {
      /* Attempt to preserve the relative positions of these two operands in
         their respective immediate use lists by adjusting their use pointer
         to point to the new operand position.  */
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp0)
          {
            use0 = ptr;
            break;
          }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp1)
          {
            use1 = ptr;
            break;
          }

      /* And adjust their location to point to the new position of the
         operand.  */
      if (use0)
        USE_OP_PTR (use0)->use = exp1;
      if (use1)
        USE_OP_PTR (use1)->use = exp0;

      /* Now swap the data.  */
      *exp0 = op1;
      *exp1 = op0;
    }
}
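
/* For example, canonicalizing a commutative operation might do
   (a sketch; the rhs pointer accessors live in gimple.h):

     if (tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                               gimple_assign_rhs2 (stmt), false))
       swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
                          gimple_assign_rhs2_ptr (stmt));

   The immediate-use lists stay valid because only the cached use
   pointers are redirected, not relinked.  */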
/* Scan the immediate_use list for VAR making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
         problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
        print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
      else
        print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}
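
/* Given the fprintf calls above, the output for a twice-used name looks
   roughly like (illustrative only, with made-up statements):

     a_5 : -->2 uses.
     b_6 = a_5 + 1;
     c_7 = a_5 * 2;
*/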
/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}
/* Dump def-use edges on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}
/* Unlink STMT's virtual definition from the IL by propagating its use.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
        SET_USE (use_p, vuse);
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}
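
/* For instance, when a dead-store elimination pass deletes the store in

       # .MEM_3 = VDEF <.MEM_2>
       a = 1;
       # VUSE <.MEM_3>
       tmp_4 = b;

   it first calls unlink_stmt_vdef on the store, after which the load's
   VUSE refers to .MEM_2 directly and the store can be removed (an
   illustrative sketch of the virtual FUD chain, not pass output).  */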
/* Return true if the var whose chain of uses starts at HEAD has no
   nondebug uses.  */
bool
has_zero_uses_1 (const ssa_use_operand_t *head)
{
  const ssa_use_operand_t *ptr;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      return false;

  return true;
}
/* Return true if the var whose chain of uses starts at HEAD has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
                  use_operand_p *use_p, gimple *stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      {
        if (single_use)
          {
            single_use = NULL;
            break;
          }
        single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return single_use;
}