/* SSA operands management for trees.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "hard-reg-set.h"
#include "ssa.h"
#include "alias.h"
#include "fold-const.h"
#include "stmt.h"
#include "print-tree.h"
#include "flags.h"
#include "gimple-pretty-print.h"
#include "internal-fn.h"
#include "tree-inline.h"
#include "timevar.h"
#include "dumpfile.h"
#include "langhooks.h"
#include "diagnostic-core.h"

/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   I.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
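
/* Illustrative sketch (editorial addition, not part of the original
   sources): a consumer of this cache walks the cached uses with the
   iterators from ssa-iterators.h, e.g.

     use_operand_p use_p;
     ssa_op_iter iter;

     FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
       if (USE_FROM_PTR (use_p) == old_val)
         SET_USE (use_p, new_val);

   SET_USE keeps the immediate-use chains consistent; a statement whose
   trees are edited directly must instead be refreshed with update_stmt,
   which ends up in update_stmt_operands below.  */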

/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use 0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def (1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 1)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

/* Operand is having its address taken.  */
#define opf_address_taken (1 << 5)

/* Array for building all the use operands.  */
static vec<tree> build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (struct function *, gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Accessor to tree-ssa-operands.c caches.  */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}

/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (struct function *fun)
{
  if (fun == NULL)
    return false;

  return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
}

/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operands' FUD chain.  */

static void
create_vop_var (struct function *fn)
{
  tree global_var;

  gcc_assert (fn->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
			   get_identifier (".MEM"),
			   void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  DECL_IGNORED_P (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;
  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;

  fn->gimple_df->vop = global_var;
}

/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operand space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_1	(1024 - sizeof (void *))
#define OP_SIZE_2	(1024 * 4 - sizeof (void *))
#define OP_SIZE_3	(1024 * 16 - sizeof (void *))
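
/* Editorial note on the arithmetic above, assuming 8-byte pointers and
   the structure layouts of this snapshot: OP_SIZE_1 is 1016 bytes,
   struct use_optype_d is a next pointer plus a four-pointer
   ssa_use_operand_t (40 bytes), and the historical def_optype_d was two
   pointers (16 bytes), so 1016 / 40 = 25 uses and 1016 / 16 = 63 defs
   fit in the first buffer, matching the figures in the comment.  */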

/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  if (!n_initialized++)
    {
      build_uses.create (10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  gimple_ssa_operands (fn)->operand_memory_index
    = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}

/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (struct function *fn)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      build_uses.release ();
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (fn)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (fn)->operand_memory) != NULL)
    {
      gimple_ssa_operands (fn)->operand_memory
	= gimple_ssa_operands (fn)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (fn)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  fn->gimple_df->vop = NULL_TREE;
}

/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (struct function *fn, unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d));

  if (gimple_ssa_operands (fn)->operand_memory_index + size
      >= gimple_ssa_operands (fn)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (fn)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}

      ptr = (ssa_operand_memory_d *) ggc_internal_alloc
	(sizeof (void *) + gimple_ssa_operands (fn)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (fn)->operand_memory;
      gimple_ssa_operands (fn)->operand_memory = ptr;
      gimple_ssa_operands (fn)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (fn)->operand_memory
	  ->mem[gimple_ssa_operands (fn)->operand_memory_index]);
  gimple_ssa_operands (fn)->operand_memory_index += size;
  return ptr;
}

/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (struct function *fn)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (fn)->free_uses)
    {
      ret = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses
	= gimple_ssa_operands (fn)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
	  ssa_operand_alloc (fn, sizeof (struct use_optype_d));
  return ret;
}

/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (struct function *fn, gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use (fn);
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}

/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs vec of tree *.  */

static inline void
finalize_ssa_defs (struct function *fn, gimple stmt)
{
  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name_fn (fn, gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }
}

/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses vec of tree *.  */

static inline void
finalize_ssa_uses (struct function *fn, gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      build_uses.safe_insert (0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr->next; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      delink_imm_use (USE_OP_PTR (ptr));
      ptr->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (fn));
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create nodes for all the new uses.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = (tree *) build_uses[new_i];
      last = add_use_op (fn, stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}

/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  build_uses.truncate (0);
}

/* Finalize all the build vectors and attach the results to STMT.  */

static inline void
finalize_ssa_stmt_operands (struct function *fn, gimple stmt)
{
  finalize_ssa_defs (fn, stmt);
  finalize_ssa_uses (fn, stmt);
  cleanup_build_arrays ();
}

/* Start the process of building up the operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (build_uses.length () == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}

/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  build_uses.safe_push ((tree) use_p);
}

/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  gcc_assert ((build_vdef == NULL_TREE
	       || build_vdef == var)
	      && (build_vuse == NULL_TREE
		  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}

/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  gcc_assert (build_vuse == NULL_TREE
	      || build_vuse == var);

  build_vuse = var;
}

/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (struct function *fn,
		     gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (fn));
  else
    append_vuse (gimple_vop (fn));
}
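
/* Editorial illustration: in a TDF_VOPS dump, the virtual operands
   appended above show up on an aliased store as

       # .MEM_3 = VDEF <.MEM_2>
       a = b_1;

   where the opf_def path created the VDEF and, via append_vdef, its
   paired VUSE of the single .MEM variable created by create_vop_var.  */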

/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (struct function *fn, tree *var_p, gimple stmt, int flags)
{
  tree var = *var_p;

  gcc_assert (SSA_VAR_P (*var_p));

  if (is_gimple_reg (var))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	;
      else
	append_use (var_p);
      if (DECL_P (*var_p))
	fn->gimple_df->ssa_renaming_needed = 1;
    }
  else
    {
      /* Mark statements with volatile operands.  */
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (var))
	gimple_set_has_volatile_ops (stmt, true);

      /* The variable is a memory access.  Add virtual operands.  */
      add_virtual_operand (fn, stmt, flags);
    }
}

/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
	TREE_ADDRESSABLE (var) = 1;
      else if (TREE_CODE (var) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
	TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}
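
/* Editorial example: for "struct S { int f; int g; } s;" a statement
   containing "&s.f" must make all of "s" addressable, since pointer
   arithmetic starting from "&s.f" can reach "s.g"; this is the PR 21407
   situation the comment above refers to.  */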

/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
   that got us here.

   FLAGS is as in get_expr_operands.  */

static void
get_mem_ref_operands (struct function *fn,
		      gimple stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (fn, stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  get_expr_operands (fn, stmt, pptr,
		     opf_non_addressable | opf_use
		     | (flags & (opf_no_vops|opf_not_non_addressable)));
}

/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (struct function *fn, gimple stmt, tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (fn, stmt,
		     &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (fn, stmt,
		     &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (fn, stmt,
		     &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  add_virtual_operand (fn, stmt, flags);
}

/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (struct function *fn, gcall *stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST)))
	add_virtual_operand (fn, stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (fn, stmt, opf_use);
    }
}
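
/* Editorial illustration: given

       int f_pure (int) __attribute__ ((pure));
       int f_const (int) __attribute__ ((const));

   a call to f_const receives no virtual operands at all (both tests
   above fail), a call to f_pure receives only a VUSE (it may read but
   not write memory), and an ordinary call receives a VDEF, which
   implies the paired VUSE via append_vdef.  */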

/* Scan the operands of the ASM statement STMT.  */

static void
get_asm_stmt_operands (struct function *fn, gasm *stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (fn, stmt,
			 &TREE_VALUE (link), opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (fn, stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (fn, stmt, opf_def);
}

/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (struct function *fn, gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable | opf_address_taken);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (!(flags & opf_address_taken))
	add_stmt_operand (fn, expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_mem_ref_operands (fn, stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (fn, stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (!(flags & opf_no_vops)
		&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile constructor is actually TREE_CLOBBER_P, transfer
	   the volatility to the statement, don't use TREE_CLOBBER_P for
	   mirroring the other uses of THIS_VOLATILE in this file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
	     idx++)
	  get_expr_operands (fn, stmt, &ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case SAD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case FMA_EXPR:
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}

/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (struct function *fn, gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_stmt_operands (fn, as_a <gasm *> (stmt));
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (fn, stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (fn, stmt, gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      append_vuse (gimple_vop (fn));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (fn, as_a <gcall *> (stmt));
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      get_expr_operands (fn, stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (fn, stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}

/* Create an operands cache for STMT.  */

static void
build_ssa_operands (struct function *fn, gimple stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (fn, stmt);
  finalize_ssa_stmt_operands (fn, stmt);
}

/* Verifies SSA statement operands.  */

DEBUG_FUNCTION bool
verify_ssa_operands (struct function *fn, gimple stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree use, def;
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (fn, stmt);

  /* Now verify the built operands are the same as present in STMT.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      FOR_EACH_VEC_ELT (build_uses, i, use)
	{
	  if (use_p->use == (tree *)use)
	    {
	      build_uses[i] = NULL_TREE;
	      break;
	    }
	}
      if (i == build_uses.length ())
	{
	  error ("excess use operand for stmt");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }

  FOR_EACH_VEC_ELT (build_uses, i, use)
    if (use != NULL_TREE)
      {
	error ("use operand missing for stmt");
	debug_generic_expr (*(tree *)use);
	return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}

/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (struct function *fn, gimple stmt)
{
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
	delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}

/* Get the operands of statement STMT.  */

void
update_stmt_operands (struct function *fn, gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active (fn))
    return;

  timevar_push (TV_TREE_OPS);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (fn, stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
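
/* Editorial usage sketch: passes normally reach this function through
   update_stmt after editing a statement in place, e.g.

       gimple_assign_set_rhs1 (stmt, new_rhs);
       update_stmt (stmt);

   update_stmt marks the statement modified and re-runs
   build_ssa_operands for just that statement.  */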

/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to check the validity of the swap operation.  */

void
swap_ssa_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  if (op0 != op1)
    {
      /* Attempt to preserve the relative positions of these two operands in
	 their respective immediate use lists by adjusting their use pointer
	 to point to the new operand position.  */
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* And adjust their location to point to the new position of the
	 operand.  */
      if (use0)
	USE_OP_PTR (use0)->use = exp1;
      if (use1)
	USE_OP_PTR (use1)->use = exp0;

      /* Now swap the data.  */
      *exp0 = op1;
      *exp1 = op0;
    }
}
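
/* Editorial usage sketch: canonicalization code uses this to exchange
   the operands of a commutative statement without rebuilding its
   operand cache, e.g. (tree_swap_operands_p takes three arguments as
   of this 2015 snapshot)

       if (tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt), false))
	 swap_ssa_operands (stmt,
			    gimple_assign_rhs1_ptr (stmt),
			    gimple_assign_rhs2_ptr (stmt));  */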

/* Scan the immediate_use list for VAR, making sure it's linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}

/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else
    if (has_single_use (var))
      fprintf (file, " single use.\n");
    else
      fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
	fprintf (file, "***end of stmt iterator marker***\n");
      else
	if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
	else
	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}

/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
	continue;
      dump_immediate_uses_for (file, var);
    }
}

/* Dump def-use edges on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}

/* Dump def-use edges for VAR on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

/* Unlink STMT's virtual definition from the IL by propagating its use.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, vuse);
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}
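
/* Editorial usage sketch: the usual way to delete a store known to be
   dead combines this with statement removal:

       unlink_stmt_vdef (stmt);
       gsi_remove (&gsi, true);
       release_defs (stmt);

   where gsi is a gimple_stmt_iterator positioned at stmt.  */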

/* Return true if the variable whose chain of uses starts at HEAD has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
		  use_operand_p *use_p, gimple *stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (USE_STMT (ptr) && !is_gimple_debug (USE_STMT (ptr)))
      {
	if (single_use)
	  {
	    single_use = NULL;
	    break;
	  }
	single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return single_use;
}
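
/* Editorial usage sketch: this helper backs the single_imm_use wrapper
   in ssa-iterators.h, used when a pass wants to fold a definition into
   its only real consumer:

       use_operand_p use_p;
       gimple use_stmt;

       if (single_imm_use (lhs, &use_p, &use_stmt))
	 SET_USE (use_p, replacement);  */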