Optimize powerpc*-*-linux* e500 hardfp/soft-fp use.
[official-gcc.git] / gcc / tree-ssa-operands.c
blobd1d5a01cb581a2731a0663025c390ff0d7898d3b
1 /* SSA operands management for trees.
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "stmt.h"
26 #include "print-tree.h"
27 #include "flags.h"
28 #include "hashtab.h"
29 #include "hash-set.h"
30 #include "vec.h"
31 #include "machmode.h"
32 #include "hard-reg-set.h"
33 #include "input.h"
34 #include "function.h"
35 #include "gimple-pretty-print.h"
36 #include "bitmap.h"
37 #include "predict.h"
38 #include "basic-block.h"
39 #include "tree-ssa-alias.h"
40 #include "internal-fn.h"
41 #include "gimple-expr.h"
42 #include "is-a.h"
43 #include "gimple.h"
44 #include "gimple-ssa.h"
45 #include "tree-phinodes.h"
46 #include "ssa-iterators.h"
47 #include "stringpool.h"
48 #include "tree-ssanames.h"
49 #include "tree-inline.h"
50 #include "timevar.h"
51 #include "dumpfile.h"
52 #include "timevar.h"
53 #include "langhooks.h"
54 #include "diagnostic-core.h"
57 /* This file contains the code required to manage the operands cache of the
58 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
59 annotation. This cache contains operands that will be of interest to
60 optimizers and other passes wishing to manipulate the IL.
62 The operand types are broken up into REAL and VIRTUAL operands. The real
63 operands are represented as pointers into the stmt's operand tree. Thus
64 any manipulation of the real operands will be reflected in the actual tree.
65 Virtual operands are represented solely in the cache, although the base
66 variable for the SSA_NAME may, or may not occur in the stmt's tree.
67 Manipulation of the virtual operands will not be reflected in the stmt tree.
69 The routines in this file are concerned with creating this operand cache
70 from a stmt tree.
72 The operand tree is then parsed by the various get_* routines which look
73 through the stmt tree for the occurrence of operands which may be of
74 interest, and calls are made to the append_* routines whenever one is
75 found. There are 4 of these routines, each representing one of the
76 4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs.
78 The append_* routines check for duplication, and simply keep a list of
79 unique objects for each operand type in the build_* extendable vectors.
81 Once the stmt tree is completely parsed, the finalize_ssa_operands()
82 routine is called, which proceeds to perform the finalization routine
83 on each of the 4 operand vectors which have been built up.
85 If the stmt had a previous operand cache, the finalization routines
86 attempt to match up the new operands with the old ones. If it's a perfect
87 match, the old vector is simply reused. If it isn't a perfect match, then
88 a new vector is created and the new operands are placed there. For
89 virtual operands, if the previous cache had SSA_NAME version of a
90 variable, and that same variable occurs in the same operands cache, then
91 the new cache vector will also get the same SSA_NAME.
93 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
94 operand vector for VUSE, then the new vector will also be modified
95 such that it contains 'a_5' rather than 'a'. */
/* Flags to describe operand properties in helpers.  These are OR-ed
   together and threaded through the get_*_operands recursion.  */

/* By default, operands are loaded.  */
#define opf_use	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def 	(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 1)

/* NOTE(review): bit (1 << 2) is deliberately skipped below — presumably
   left vacant by a removed flag; confirm against history before reusing.  */

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

/* Operand is having its address taken.  */
#define opf_address_taken (1 << 5)
/* Array for building all the use operands.  Elements are really tree *
   pointers into the statement, cast to tree for storage (see append_use
   and finalize_ssa_uses).  */
static vec<tree> build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our datastructures that needs to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

/* Forward declaration: get_expr_operands recurses through helpers that
   are defined before it.  */
static void get_expr_operands (struct function *, gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  The shared build_*
   state above is torn down only when this drops back to zero.  */
static int n_initialized = 0;
142 /* Accessor to tree-ssa-operands.c caches. */
143 static inline struct ssa_operands *
144 gimple_ssa_operands (const struct function *fun)
146 return &fun->gimple_df->ssa_operands;
150 /* Return true if the SSA operands cache is active. */
152 bool
153 ssa_operands_active (struct function *fun)
155 if (fun == NULL)
156 return false;
158 return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operands FUD chain.  */

static void
create_vop_var (struct function *fn)
{
  tree global_var;

  /* Only ever create the .MEM decl once per function.  */
  gcc_assert (fn->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
			   get_identifier (".MEM"),
			   void_type_node);
  /* The decl is compiler-generated and must never appear in user
     diagnostics or debug info.  */
  DECL_ARTIFICIAL (global_var) = 1;
  DECL_IGNORED_P (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  /* Treat it as an external global so it conservatively aliases all
     memory, but mark it used so it is never removed.  */
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;
  /* Tag it so the rest of the compiler can recognize the virtual
     operand decl.  */
  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;

  fn->gimple_df->vop = global_var;
}
/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operands space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

/* OP_SIZE_INIT of zero forces the very first allocation request to grab
   a fresh chunk (see ssa_operand_alloc).  Each chunk reserves one
   pointer's worth of space for the 'next' link of ssa_operand_memory_d.  */
#define OP_SIZE_INIT	0
#define OP_SIZE_1	(1024 - sizeof (void *))
#define OP_SIZE_2	(1024 * 4 - sizeof (void *))
#define OP_SIZE_3	(1024 * 16 - sizeof (void *))
/* Initialize the operand cache routines for function FN.  */

void
init_ssa_operands (struct function *fn)
{
  /* The build_* scratch state is shared across all functions; set it up
     only for the first initialized function.  */
  if (!n_initialized++)
    {
      build_uses.create (10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  /* Setting the index equal to the (stale) chunk size makes the first
     ssa_operand_alloc call see a "full" chunk and allocate a new one.
     The order of these two assignments matters.  */
  gimple_ssa_operands (fn)->operand_memory_index
     = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}
/* Dispose of anything required by the operand routines for function FN.  */

void
fini_ssa_operands (struct function *fn)
{
  struct ssa_operand_memory_d *ptr;

  /* Release the shared build_* state only when the last initialized
     function is being finalized.  */
  if (!--n_initialized)
    {
      build_uses.release ();
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (fn)->free_uses = NULL;

  /* Walk the chunk chain and return each chunk to the GC allocator.  */
  while ((ptr = gimple_ssa_operands (fn)->operand_memory) != NULL)
    {
      gimple_ssa_operands (fn)->operand_memory
	= gimple_ssa_operands (fn)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (fn)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  fn->gimple_df->vop = NULL_TREE;
}
/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (struct function *fn, unsigned size)
{
  char *ptr;

  /* Only use operands are ever allocated this way.  */
  gcc_assert (size == sizeof (struct use_optype_d));

  /* Grow into a new chunk when the current one cannot hold SIZE more
     bytes.  Note OP_SIZE_INIT is 0, so the first call always lands
     here (see init_ssa_operands).  */
  if (gimple_ssa_operands (fn)->operand_memory_index + size
      >= gimple_ssa_operands (fn)->ssa_operand_mem_size)
    {
      /* NOTE: this PTR intentionally shadows the outer one; it refers
	 to the new chunk header rather than the returned slot.  */
      struct ssa_operand_memory_d *ptr;

      /* Escalate the chunk size: INIT -> 1k -> 4k -> 16k, then stay
	 at 16k for all subsequent chunks.  */
      switch (gimple_ssa_operands (fn)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* The extra pointer covers the chunk's 'next' link.  */
      ptr = (ssa_operand_memory_d *) ggc_internal_alloc
	(sizeof (void *) + gimple_ssa_operands (fn)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (fn)->operand_memory;
      gimple_ssa_operands (fn)->operand_memory = ptr;
      gimple_ssa_operands (fn)->operand_memory_index = 0;
    }

  /* Carve the slot out of the current chunk by bumping the index.  */
  ptr = &(gimple_ssa_operands (fn)->operand_memory
	  ->mem[gimple_ssa_operands (fn)->operand_memory_index]);
  gimple_ssa_operands (fn)->operand_memory_index += size;
  return ptr;
}
301 /* Allocate a USE operand. */
303 static inline struct use_optype_d *
304 alloc_use (struct function *fn)
306 struct use_optype_d *ret;
307 if (gimple_ssa_operands (fn)->free_uses)
309 ret = gimple_ssa_operands (fn)->free_uses;
310 gimple_ssa_operands (fn)->free_uses
311 = gimple_ssa_operands (fn)->free_uses->next;
313 else
314 ret = (struct use_optype_d *)
315 ssa_operand_alloc (fn, sizeof (struct use_optype_d));
316 return ret;
320 /* Adds OP to the list of uses of statement STMT after LAST. */
322 static inline use_optype_p
323 add_use_op (struct function *fn, gimple stmt, tree *op, use_optype_p last)
325 use_optype_p new_use;
327 new_use = alloc_use (fn);
328 USE_OP_PTR (new_use)->use = op;
329 link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
330 last->next = new_use;
331 new_use->next = NULL;
332 return new_use;
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs vec of tree *.  */

static inline void
finalize_ssa_defs (struct function *fn, gimple stmt)
{
  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      /* Compare against the underlying variable so an existing
	 SSA_NAME vdef for the same variable is preserved.  */
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  /* Re-point consumers of the old vdef at the vuse and give
	     the SSA name back to the free list.  */
	  unlink_stmt_vdef (stmt);
	  release_ssa_name_fn (fn, gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }
}
376 /* Takes elements from build_uses and turns them into use operands of STMT.
377 TODO -- Make build_uses vec of tree *. */
379 static inline void
380 finalize_ssa_uses (struct function *fn, gimple stmt)
382 unsigned new_i;
383 struct use_optype_d new_list;
384 use_optype_p old_ops, ptr, last;
386 /* Pre-pend the VUSE we may have built. */
387 if (build_vuse != NULL_TREE)
389 tree oldvuse = gimple_vuse (stmt);
390 if (oldvuse
391 && TREE_CODE (oldvuse) == SSA_NAME)
392 oldvuse = SSA_NAME_VAR (oldvuse);
393 if (oldvuse != (build_vuse != NULL_TREE
394 ? build_vuse : build_vdef))
395 gimple_set_vuse (stmt, NULL_TREE);
396 build_uses.safe_insert (0, (tree)gimple_vuse_ptr (stmt));
399 new_list.next = NULL;
400 last = &new_list;
402 old_ops = gimple_use_ops (stmt);
404 /* Clear a no longer necessary VUSE. */
405 if (build_vuse == NULL_TREE
406 && gimple_vuse (stmt) != NULL_TREE)
407 gimple_set_vuse (stmt, NULL_TREE);
409 /* If there is anything in the old list, free it. */
410 if (old_ops)
412 for (ptr = old_ops; ptr; ptr = ptr->next)
413 delink_imm_use (USE_OP_PTR (ptr));
414 old_ops->next = gimple_ssa_operands (fn)->free_uses;
415 gimple_ssa_operands (fn)->free_uses = old_ops;
418 /* If we added a VUSE, make sure to set the operand if it is not already
419 present and mark it for renaming. */
420 if (build_vuse != NULL_TREE
421 && gimple_vuse (stmt) == NULL_TREE)
423 gimple_set_vuse (stmt, gimple_vop (fn));
424 fn->gimple_df->rename_vops = 1;
425 fn->gimple_df->ssa_renaming_needed = 1;
428 /* Now create nodes for all the new nodes. */
429 for (new_i = 0; new_i < build_uses.length (); new_i++)
431 tree *op = (tree *) build_uses[new_i];
432 last = add_use_op (fn, stmt, op, last);
435 /* Now set the stmt's operands. */
436 gimple_set_use_ops (stmt, new_list.next);
440 /* Clear the in_list bits and empty the build array for VDEFs and
441 VUSEs. */
443 static inline void
444 cleanup_build_arrays (void)
446 build_vdef = NULL_TREE;
447 build_vuse = NULL_TREE;
448 build_uses.truncate (0);
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (struct function *fn, gimple stmt)
{
  /* Order matters: both finalizers read the build_* state that
     cleanup_build_arrays resets afterwards.  */
  finalize_ssa_defs (fn, stmt);
  finalize_ssa_uses (fn, stmt);
  cleanup_build_arrays ();
}
463 /* Start the process of building up operands vectors in INFO. */
465 static inline void
466 start_ssa_stmt_operands (void)
468 gcc_assert (build_uses.length () == 0);
469 gcc_assert (build_vuse == NULL_TREE);
470 gcc_assert (build_vdef == NULL_TREE);
/* Add USE_P to the list of pointers to operands.  The tree * is cast
   to tree purely for storage in the build_uses vector.  */

static inline void
append_use (tree *use_p)
{
  build_uses.safe_push ((tree) use_p);
}
/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  /* There is a single virtual operand per statement; it must either
     be unset or already be VAR.  */
  gcc_assert ((build_vdef == NULL_TREE
	       || build_vdef == var)
	      && (build_vuse == NULL_TREE
		  || build_vuse == var));

  /* A VDEF implies a VUSE of the same variable.  */
  build_vdef = var;
  build_vuse = var;
}
/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  /* There is a single virtual use per statement; it must either be
     unset or already be VAR.  */
  gcc_assert (build_vuse == NULL_TREE
	      || build_vuse == var);

  build_vuse = var;
}
/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (struct function *fn,
		     gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  /* Debug statements never get virtual operands.  */
  gcc_assert (!is_gimple_debug (stmt));

  /* Both cases use the single .MEM decl as the virtual operand.  */
  if (flags & opf_def)
    append_vdef (gimple_vop (fn));
  else
    append_vuse (gimple_vop (fn));
}
/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (struct function *fn, tree *var_p, gimple stmt, int flags)
{
  tree var = *var_p;

  gcc_assert (SSA_VAR_P (*var_p));

  if (is_gimple_reg (var))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	/* Real DEFs are trees pointed to directly by the statement, so
	   nothing is recorded here.  NOTE(review): the extraction shows
	   an empty branch; an empty statement is presumably intended —
	   confirm against upstream.  */
	;
      else
	append_use (var_p);
      /* A bare decl (not yet an SSA_NAME) means the stmt needs
	 renaming into SSA form.  */
      if (DECL_P (*var_p))
	fn->gimple_df->ssa_renaming_needed = 1;
    }
  else
    {
      /* Mark statements with volatile operands.  */
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (var))
	gimple_set_has_volatile_ops (stmt, true);

      /* The variable is a memory access.  Add virtual operands.  */
      add_virtual_operand (fn, stmt, flags);
    }
}
564 /* Mark the base address of REF as having its address taken.
565 REF may be a single variable whose address has been taken or any
566 other valid GIMPLE memory reference (structure reference, array,
567 etc). */
569 static void
570 mark_address_taken (tree ref)
572 tree var;
574 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
575 as the only thing we take the address of. If VAR is a structure,
576 taking the address of a field means that the whole structure may
577 be referenced using pointer arithmetic. See PR 21407 and the
578 ensuing mailing list discussion. */
579 var = get_base_address (ref);
580 if (var)
582 if (DECL_P (var))
583 TREE_ADDRESSABLE (var) = 1;
584 else if (TREE_CODE (var) == MEM_REF
585 && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
586 && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
587 TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
592 /* A subroutine of get_expr_operands to handle MEM_REF.
594 STMT is the statement being processed, EXPR is the MEM_REF
595 that got us here.
597 FLAGS is as in get_expr_operands. */
599 static void
600 get_mem_ref_operands (struct function *fn,
601 gimple stmt, tree expr, int flags)
603 tree *pptr = &TREE_OPERAND (expr, 0);
605 if (!(flags & opf_no_vops)
606 && TREE_THIS_VOLATILE (expr))
607 gimple_set_has_volatile_ops (stmt, true);
609 /* Add the VOP. */
610 add_virtual_operand (fn, stmt, flags);
612 /* If requested, add a USE operand for the base pointer. */
613 get_expr_operands (fn, stmt, pptr,
614 opf_non_addressable | opf_use
615 | (flags & (opf_no_vops|opf_not_non_addressable)));
619 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
621 static void
622 get_tmr_operands (struct function *fn, gimple stmt, tree expr, int flags)
624 if (!(flags & opf_no_vops)
625 && TREE_THIS_VOLATILE (expr))
626 gimple_set_has_volatile_ops (stmt, true);
628 /* First record the real operands. */
629 get_expr_operands (fn, stmt,
630 &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
631 get_expr_operands (fn, stmt,
632 &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
633 get_expr_operands (fn, stmt,
634 &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));
636 add_virtual_operand (fn, stmt, flags);
640 /* If STMT is a call that may clobber globals and other symbols that
641 escape, add them to the VDEF/VUSE lists for it. */
643 static void
644 maybe_add_call_vops (struct function *fn, gimple stmt)
646 int call_flags = gimple_call_flags (stmt);
648 /* If aliases have been computed already, add VDEF or VUSE
649 operands for all the symbols that have been found to be
650 call-clobbered. */
651 if (!(call_flags & ECF_NOVOPS))
653 /* A 'pure' or a 'const' function never call-clobbers anything. */
654 if (!(call_flags & (ECF_PURE | ECF_CONST)))
655 add_virtual_operand (fn, stmt, opf_def);
656 else if (!(call_flags & ECF_CONST))
657 add_virtual_operand (fn, stmt, opf_use);
662 /* Scan operands in the ASM_EXPR stmt referred to in INFO. */
664 static void
665 get_asm_stmt_operands (struct function *fn, gimple stmt)
667 size_t i, noutputs;
668 const char **oconstraints;
669 const char *constraint;
670 bool allows_mem, allows_reg, is_inout;
672 noutputs = gimple_asm_noutputs (stmt);
673 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
675 /* Gather all output operands. */
676 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
678 tree link = gimple_asm_output_op (stmt, i);
679 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
680 oconstraints[i] = constraint;
681 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
682 &allows_reg, &is_inout);
684 /* This should have been split in gimplify_asm_expr. */
685 gcc_assert (!allows_reg || !is_inout);
687 /* Memory operands are addressable. Note that STMT needs the
688 address of this operand. */
689 if (!allows_reg && allows_mem)
690 mark_address_taken (TREE_VALUE (link));
692 get_expr_operands (fn, stmt,
693 &TREE_VALUE (link), opf_def | opf_not_non_addressable);
696 /* Gather all input operands. */
697 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
699 tree link = gimple_asm_input_op (stmt, i);
700 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
701 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
702 &allows_mem, &allows_reg);
704 /* Memory operands are addressable. Note that STMT needs the
705 address of this operand. */
706 if (!allows_reg && allows_mem)
707 mark_address_taken (TREE_VALUE (link));
709 get_expr_operands (fn, stmt, &TREE_VALUE (link), opf_not_non_addressable);
712 /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
713 if (gimple_asm_clobbers_memory_p (stmt))
714 add_virtual_operand (fn, stmt, opf_def);
/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (struct function *fn, gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  /* Flags used for recursing into rvalue sub-operands (e.g. array
     indices), which are always plain uses.  */
  int uflags = opf_use;

  if (expr == NULL)
    return;

  /* In debug stmts we keep honoring opf_no_vops for sub-expressions.  */
  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable | opf_address_taken);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* A decl whose address is being taken is not itself an operand.  */
      if (!(flags & opf_address_taken))
	add_stmt_operand (fn, expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_mem_ref_operands (fn, stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (fn, stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	/* The base object inherits the caller's flags (it may be a
	   def); the indices below are always uses.  */
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (!(flags & opf_no_vops)
		&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile constructor is actually TREE_CLOBBER_P, transfer
	   the volatility to the statement, don't use TREE_CLOBBER_P for
	   mirroring the other uses of THIS_VOLATILE in this file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
	     idx++)
	  get_expr_operands (fn, stmt, &ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case SAD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case FMA_EXPR:
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      /* Route whole code classes through the shared labels above.  */
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (struct function *fn, gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_stmt_operands (fn, stmt);
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (fn, stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (fn, stmt, gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      /* A return conservatively uses all memory, then scans its
	 operands like the default case.  */
      append_vuse (gimple_vop (fn));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (fn, stmt);
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      /* Operand 0 is the LHS: scan it as a def, then fall through to
	 scan the remaining operands as uses.  */
      get_expr_operands (fn, stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (fn, stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}
/* Create an operands cache for STMT.  */

static void
build_ssa_operands (struct function *fn, gimple stmt)
{
  /* Initially assume that the statement has no volatile operands;
     the scan below re-sets the flag if it finds any.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (fn, stmt);
  finalize_ssa_stmt_operands (fn, stmt);
}
/* Verifies SSA statement operands.  Returns true (and emits an error)
   on any mismatch between the cached operands and a fresh scan.  */

DEBUG_FUNCTION bool
verify_ssa_operands (struct function *fn, gimple stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree use, def;
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them: the fresh scan fills the
     build_* state, which is compared against the stmt below.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (fn, stmt);

  /* Now verify the built operands are the same as present in STMT.
     Strip SSA_NAMEs down to their variable first, since the build_*
     state only ever holds the underlying variable.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  /* Match each cached use against the freshly built list, NULL-ing
     out matched entries so leftovers can be detected afterwards.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      FOR_EACH_VEC_ELT (build_uses, i, use)
	{
	  if (use_p->use == (tree *)use)
	    {
	      build_uses[i] = NULL_TREE;
	      break;
	    }
	}
      if (i == build_uses.length ())
	{
	  error ("excess use operand for stmt");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }

  /* Anything still non-NULL was built but is absent from the cache.  */
  FOR_EACH_VEC_ELT (build_uses, i, use)
    if (use != NULL_TREE)
      {
	error ("use operand missing for stmt");
	debug_generic_expr (*(tree *)use);
	return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}
/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (struct function *fn, gimple stmt)
{
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (uses)
    {
      /* Delink every node from its immediate-use chain; the loop body
	 handles all but the last node, which is delinked separately so
	 LAST_USE still points at the list tail for the splice below.  */
      for (last_use = uses; last_use->next; last_use = last_use->next)
	delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      /* Splice the whole list onto the free list in O(1).  */
      last_use->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}
/* Get the operands of statement STMT.  */

void
update_stmt_operands (struct function *fn, gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active (fn))
    return;

  timevar_push (TV_TREE_OPS);

  /* Only statements marked modified should reach this point.  */
  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (fn, stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is done
   to test the validity of the swap operation.  */

void
swap_ssa_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operands are already equal there is nothing to relink.  */
  if (op0 != op1)
    {
      /* Attempt to preserve the relative positions of these two operands in
	 their respective immediate use lists by adjusting their use pointer
	 to point to the new operand position.  */
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* And adjust their location to point to the new position of the
	 operand.  The imm-use chains themselves are left untouched;
	 only the cached tree * slots are cross-wired.  */
      if (use0)
	USE_OP_PTR (use0)->use = exp1;
      if (use1)
	USE_OP_PTR (use1)->use = exp0;

      /* Now swap the data.  */
      *exp0 = op1;
      *exp1 = op0;
    }
}
/* Scan the immediate_use list for VAR making sure its linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  /* The list head lives inside the SSA name itself and is a sentinel
     node: it never carries a use pointer.  */
  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      /* An unlinked head must be fully unlinked in both directions.  */
      gcc_assert (list->next == NULL);
      return false;
    }

  /* First pass: walk forward through the circular list, checking that
     each node's prev pointer points back at the node we just left and
     that every node refers to VAR.  */
  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  The same counter is
     decremented so that after both passes it must return to zero if
     the two directions visited the same number of nodes.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      /* More backward nodes than forward ones is also a corruption.  */
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  /* PTR is non-NULL on every path that reaches here, so it is safe to
     dereference when describing the offending node.  */
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
1224 /* Dump all the immediate uses to FILE. */
1226 void
1227 dump_immediate_uses_for (FILE *file, tree var)
1229 imm_use_iterator iter;
1230 use_operand_p use_p;
1232 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
1234 print_generic_expr (file, var, TDF_SLIM);
1235 fprintf (file, " : -->");
1236 if (has_zero_uses (var))
1237 fprintf (file, " no uses.\n");
1238 else
1239 if (has_single_use (var))
1240 fprintf (file, " single use.\n");
1241 else
1242 fprintf (file, "%d uses.\n", num_imm_uses (var));
1244 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
1246 if (use_p->loc.stmt == NULL && use_p->use == NULL)
1247 fprintf (file, "***end of stmt iterator marker***\n");
1248 else
1249 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
1250 print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
1251 else
1252 print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
1254 fprintf (file, "\n");
1258 /* Dump all the immediate uses to FILE. */
1260 void
1261 dump_immediate_uses (FILE *file)
1263 tree var;
1264 unsigned int x;
1266 fprintf (file, "Immediate_uses: \n\n");
1267 for (x = 1; x < num_ssa_names; x++)
1269 var = ssa_name (x);
1270 if (!var)
1271 continue;
1272 dump_immediate_uses_for (file, var);
1277 /* Dump def-use edges on stderr. */
1279 DEBUG_FUNCTION void
1280 debug_immediate_uses (void)
1282 dump_immediate_uses (stderr);
1286 /* Dump def-use edges on stderr. */
1288 DEBUG_FUNCTION void
1289 debug_immediate_uses_for (tree var)
1291 dump_immediate_uses_for (stderr, var);
1295 /* Unlink STMTs virtual definition from the IL by propagating its use. */
1297 void
1298 unlink_stmt_vdef (gimple stmt)
1300 use_operand_p use_p;
1301 imm_use_iterator iter;
1302 gimple use_stmt;
1303 tree vdef = gimple_vdef (stmt);
1304 tree vuse = gimple_vuse (stmt);
1306 if (!vdef
1307 || TREE_CODE (vdef) != SSA_NAME)
1308 return;
1310 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
1312 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1313 SET_USE (use_p, vuse);
1316 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
1317 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
1321 /* Return true if the var whose chain of uses starts at PTR has no
1322 nondebug uses. */
1323 bool
1324 has_zero_uses_1 (const ssa_use_operand_t *head)
1326 const ssa_use_operand_t *ptr;
1328 for (ptr = head->next; ptr != head; ptr = ptr->next)
1329 if (!is_gimple_debug (USE_STMT (ptr)))
1330 return false;
1332 return true;
1336 /* Return true if the var whose chain of uses starts at PTR has a
1337 single nondebug use. Set USE_P and STMT to that single nondebug
1338 use, if so, or to NULL otherwise. */
1339 bool
1340 single_imm_use_1 (const ssa_use_operand_t *head,
1341 use_operand_p *use_p, gimple *stmt)
1343 ssa_use_operand_t *ptr, *single_use = 0;
1345 for (ptr = head->next; ptr != head; ptr = ptr->next)
1346 if (!is_gimple_debug (USE_STMT (ptr)))
1348 if (single_use)
1350 single_use = NULL;
1351 break;
1353 single_use = ptr;
1356 if (use_p)
1357 *use_p = single_use;
1359 if (stmt)
1360 *stmt = single_use ? single_use->loc.stmt : NULL;
1362 return single_use;