gcc/tree-ssa-operands.c
1 /* SSA operands management for trees.
2 Copyright (C) 2003-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "timevar.h"
27 #include "ssa.h"
28 #include "gimple-pretty-print.h"
29 #include "diagnostic-core.h"
30 #include "stmt.h"
31 #include "print-tree.h"
32 #include "dumpfile.h"
35 /* This file contains the code required to manage the operands cache of the
36 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
37 annotation. This cache contains operands that will be of interest to
38 optimizers and other passes wishing to manipulate the IL.
40 The operand types are broken up into REAL and VIRTUAL operands. The real
41 operands are represented as pointers into the stmt's operand tree. Thus
42 any manipulation of the real operands will be reflected in the actual tree.
43 Virtual operands are represented solely in the cache, although the base
44 variable for the SSA_NAME may or may not occur in the stmt's tree.
45 Manipulation of the virtual operands will not be reflected in the stmt tree.
47 The routines in this file are concerned with creating this operand cache
48 from a stmt tree.
50 The operand tree is then parsed by the various get_* routines, which look
51 through the stmt tree for the occurrence of operands which may be of
52 interest, and calls are made to the append_* routines whenever one is
53 found. There are 4 of these routines, each representing one of the
54 4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.
56 The append_* routines check for duplication, and simply keep a list of
57 unique objects for each operand type in the build_* extendable vectors.
59 Once the stmt tree is completely parsed, the finalize_ssa_operands()
60 routine is called, which proceeds to perform the finalization routine
61 on each of the 4 operand vectors which have been built up.
63 If the stmt had a previous operand cache, the finalization routines
64 attempt to match up the new operands with the old ones. If it's a perfect
65 match, the old vector is simply reused. If it isn't a perfect match, then
66 a new vector is created and the new operands are placed there. For
67 virtual operands, if the previous cache had an SSA_NAME version of a
68 variable, and that same variable occurs again in the new operands, then
69 the new cache vector will also get the same SSA_NAME.
71 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
72 operand vector for VUSE, then the new vector will also be modified
73 such that it contains 'a_5' rather than 'a'. */
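/* Illustrative sketch, not part of the original file: a pass that has
   changed a statement STMT in function FN typically marks it modified
   and lets this cache be rebuilt before walking the operands, roughly:

       use_operand_p use_p;
       ssa_op_iter iter;

       update_stmt_fn (fn, stmt);
       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
         print_generic_expr (stderr, USE_FROM_PTR (use_p), TDF_SLIM);

   update_stmt_fn () rebuilds the use/VUSE/VDEF cache (gimple-ssa.h),
   and the iterator macro comes from ssa-iterators.h.  */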
76 /* Flags to describe operand properties in helpers. */
78 /* By default, operands are loaded. */
79 #define opf_use 0
81 /* Operand is the target of an assignment expression or a
82 call-clobbered variable. */
83 #define opf_def (1 << 0)
85 /* No virtual operands should be created in the expression. This is used
86 when traversing ADDR_EXPR nodes which have different semantics than
87 other expressions. Inside an ADDR_EXPR node, the only operands that we
88 need to consider are indices into arrays. For instance, &a.b[i] should
89 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
90 VUSE for 'b'. */
91 #define opf_no_vops (1 << 1)
93 /* Operand is in a place where address-taken does not imply addressable. */
94 #define opf_non_addressable (1 << 3)
96 /* Operand is in a place where opf_non_addressable does not apply. */
97 #define opf_not_non_addressable (1 << 4)
99 /* Operand is having its address taken. */
100 #define opf_address_taken (1 << 5)
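/* Illustration, not in the original file: for a statement like
   p = &a.b[i], parse_ssa_operands scans the LHS with opf_def and the
   RHS with opf_use; entering the ADDR_EXPR &a.b[i] ORs opf_no_vops into
   the flags, so only the index 'i' becomes a real USE and no VUSE is
   created for 'a' or 'b'.  */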
102 /* Array for building all the use operands. */
103 static vec<tree *> build_uses;
105 /* The built VDEF operand. */
106 static tree build_vdef;
108 /* The built VUSE operand. */
109 static tree build_vuse;
111 /* Bitmap obstack for our datastructures that needs to survive across
112 compilations of multiple functions. */
113 static bitmap_obstack operands_bitmap_obstack;
115 static void get_expr_operands (struct function *, gimple *, tree *, int);
117 /* Number of functions with initialized ssa_operands. */
118 static int n_initialized = 0;
120 /* Accessor to tree-ssa-operands.c caches. */
121 static inline struct ssa_operands *
122 gimple_ssa_operands (const struct function *fun)
124 return &fun->gimple_df->ssa_operands;
128 /* Return true if the SSA operands cache is active. */
130 bool
131 ssa_operands_active (struct function *fun)
133 if (fun == NULL)
134 return false;
136 return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
140 /* Create the VOP variable, an artificial global variable to act as a
141 representative of all of the virtual operands FUD chain. */
143 static void
144 create_vop_var (struct function *fn)
146 tree global_var;
148 gcc_assert (fn->gimple_df->vop == NULL_TREE);
150 global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
151 get_identifier (".MEM"),
152 void_type_node);
153 DECL_ARTIFICIAL (global_var) = 1;
154 DECL_IGNORED_P (global_var) = 1;
155 TREE_READONLY (global_var) = 0;
156 DECL_EXTERNAL (global_var) = 1;
157 TREE_STATIC (global_var) = 1;
158 TREE_USED (global_var) = 1;
159 DECL_CONTEXT (global_var) = NULL_TREE;
160 TREE_THIS_VOLATILE (global_var) = 0;
161 TREE_ADDRESSABLE (global_var) = 0;
162 VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;
164 fn->gimple_df->vop = global_var;
167 /* These are the sizes of the operand memory buffer in bytes which gets
168 allocated each time more operands space is required. The final value is
169 the amount that is allocated every time after that.
170 In 1k we can fit 25 use operands (or 63 def operands) on a host with
171 8 byte pointers, that would be 10 statements each with 1 def and 2
172 uses. */
174 #define OP_SIZE_INIT 0
175 #define OP_SIZE_1 (1024 - sizeof (void *))
176 #define OP_SIZE_2 (1024 * 4 - sizeof (void *))
177 #define OP_SIZE_3 (1024 * 16 - sizeof (void *))
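/* Worked example for the sizing comment above (a sketch, assuming an
   LP64 host with 8-byte pointers): struct use_optype_d is one 'next'
   pointer plus an ssa_use_operand_t of four pointer-sized fields,
   i.e. 5 * 8 = 40 bytes, so a single OP_SIZE_1 chunk holds
   (1024 - 8) / 40 = 25 use operands.  */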
179 /* Initialize the operand cache routines. */
181 void
182 init_ssa_operands (struct function *fn)
184 if (!n_initialized++)
186 build_uses.create (10);
187 build_vuse = NULL_TREE;
188 build_vdef = NULL_TREE;
189 bitmap_obstack_initialize (&operands_bitmap_obstack);
192 gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
193 gimple_ssa_operands (fn)->operand_memory_index
194 = gimple_ssa_operands (fn)->ssa_operand_mem_size;
195 gimple_ssa_operands (fn)->ops_active = true;
196 gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
197 create_vop_var (fn);
201 /* Dispose of anything required by the operand routines. */
203 void
204 fini_ssa_operands (struct function *fn)
206 struct ssa_operand_memory_d *ptr;
208 if (!--n_initialized)
210 build_uses.release ();
211 build_vdef = NULL_TREE;
212 build_vuse = NULL_TREE;
215 gimple_ssa_operands (fn)->free_uses = NULL;
217 while ((ptr = gimple_ssa_operands (fn)->operand_memory) != NULL)
219 gimple_ssa_operands (fn)->operand_memory
220 = gimple_ssa_operands (fn)->operand_memory->next;
221 ggc_free (ptr);
224 gimple_ssa_operands (fn)->ops_active = false;
226 if (!n_initialized)
227 bitmap_obstack_release (&operands_bitmap_obstack);
229 fn->gimple_df->vop = NULL_TREE;
233 /* Return memory for an operand of size SIZE. */
235 static inline void *
236 ssa_operand_alloc (struct function *fn, unsigned size)
238 char *ptr;
240 gcc_assert (size == sizeof (struct use_optype_d));
242 if (gimple_ssa_operands (fn)->operand_memory_index + size
243 >= gimple_ssa_operands (fn)->ssa_operand_mem_size)
245 struct ssa_operand_memory_d *ptr;
247 switch (gimple_ssa_operands (fn)->ssa_operand_mem_size)
249 case OP_SIZE_INIT:
250 gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_1;
251 break;
252 case OP_SIZE_1:
253 gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_2;
254 break;
255 case OP_SIZE_2:
256 case OP_SIZE_3:
257 gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_3;
258 break;
259 default:
260 gcc_unreachable ();
264 ptr = (ssa_operand_memory_d *) ggc_internal_alloc
265 (sizeof (void *) + gimple_ssa_operands (fn)->ssa_operand_mem_size);
267 ptr->next = gimple_ssa_operands (fn)->operand_memory;
268 gimple_ssa_operands (fn)->operand_memory = ptr;
269 gimple_ssa_operands (fn)->operand_memory_index = 0;
272 ptr = &(gimple_ssa_operands (fn)->operand_memory
273 ->mem[gimple_ssa_operands (fn)->operand_memory_index]);
274 gimple_ssa_operands (fn)->operand_memory_index += size;
275 return ptr;
279 /* Allocate a USE operand. */
281 static inline struct use_optype_d *
282 alloc_use (struct function *fn)
284 struct use_optype_d *ret;
285 if (gimple_ssa_operands (fn)->free_uses)
287 ret = gimple_ssa_operands (fn)->free_uses;
288 gimple_ssa_operands (fn)->free_uses
289 = gimple_ssa_operands (fn)->free_uses->next;
291 else
292 ret = (struct use_optype_d *)
293 ssa_operand_alloc (fn, sizeof (struct use_optype_d));
294 return ret;
298 /* Adds OP to the list of uses of statement STMT after LAST. */
300 static inline use_optype_p
301 add_use_op (struct function *fn, gimple *stmt, tree *op, use_optype_p last)
303 use_optype_p new_use;
305 new_use = alloc_use (fn);
306 USE_OP_PTR (new_use)->use = op;
307 link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
308 last->next = new_use;
309 new_use->next = NULL;
310 return new_use;
315 /* Turn build_vdef (if any) into the virtual def operand of STMT.
316 Real defs are pointers into the stmt itself and need no separate cache. */
318 static inline void
319 finalize_ssa_defs (struct function *fn, gimple *stmt)
321 /* Pre-pend the vdef we may have built. */
322 if (build_vdef != NULL_TREE)
324 tree oldvdef = gimple_vdef (stmt);
325 if (oldvdef
326 && TREE_CODE (oldvdef) == SSA_NAME)
327 oldvdef = SSA_NAME_VAR (oldvdef);
328 if (oldvdef != build_vdef)
329 gimple_set_vdef (stmt, build_vdef);
332 /* Clear and unlink a no longer necessary VDEF. */
333 if (build_vdef == NULL_TREE
334 && gimple_vdef (stmt) != NULL_TREE)
336 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
338 unlink_stmt_vdef (stmt);
339 release_ssa_name_fn (fn, gimple_vdef (stmt));
341 gimple_set_vdef (stmt, NULL_TREE);
344 /* If we have a non-SSA_NAME VDEF, mark it for renaming. */
345 if (gimple_vdef (stmt)
346 && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
348 fn->gimple_df->rename_vops = 1;
349 fn->gimple_df->ssa_renaming_needed = 1;
354 /* Takes elements from build_uses and turns them into use operands of STMT. */
356 static inline void
357 finalize_ssa_uses (struct function *fn, gimple *stmt)
359 unsigned new_i;
360 struct use_optype_d new_list;
361 use_optype_p old_ops, ptr, last;
363 /* Pre-pend the VUSE we may have built. */
364 if (build_vuse != NULL_TREE)
366 tree oldvuse = gimple_vuse (stmt);
367 if (oldvuse
368 && TREE_CODE (oldvuse) == SSA_NAME)
369 oldvuse = SSA_NAME_VAR (oldvuse);
370 if (oldvuse != (build_vuse != NULL_TREE
371 ? build_vuse : build_vdef))
372 gimple_set_vuse (stmt, NULL_TREE);
373 build_uses.safe_insert (0, gimple_vuse_ptr (stmt));
376 new_list.next = NULL;
377 last = &new_list;
379 old_ops = gimple_use_ops (stmt);
381 /* Clear a no longer necessary VUSE. */
382 if (build_vuse == NULL_TREE
383 && gimple_vuse (stmt) != NULL_TREE)
384 gimple_set_vuse (stmt, NULL_TREE);
386 /* If there is anything in the old list, free it. */
387 if (old_ops)
389 for (ptr = old_ops; ptr->next; ptr = ptr->next)
390 delink_imm_use (USE_OP_PTR (ptr));
391 delink_imm_use (USE_OP_PTR (ptr));
392 ptr->next = gimple_ssa_operands (fn)->free_uses;
393 gimple_ssa_operands (fn)->free_uses = old_ops;
396 /* If we added a VUSE, make sure to set the operand if it is not already
397 present and mark it for renaming. */
398 if (build_vuse != NULL_TREE
399 && gimple_vuse (stmt) == NULL_TREE)
401 gimple_set_vuse (stmt, gimple_vop (fn));
402 fn->gimple_df->rename_vops = 1;
403 fn->gimple_df->ssa_renaming_needed = 1;
406 /* Now create nodes for all the new uses. */
407 for (new_i = 0; new_i < build_uses.length (); new_i++)
409 tree *op = build_uses[new_i];
410 last = add_use_op (fn, stmt, op, last);
413 /* Now set the stmt's operands. */
414 gimple_set_use_ops (stmt, new_list.next);
418 /* Clear the in_list bits and empty the build array for VDEFs and
419 VUSEs. */
421 static inline void
422 cleanup_build_arrays (void)
424 build_vdef = NULL_TREE;
425 build_vuse = NULL_TREE;
426 build_uses.truncate (0);
430 /* Finalize all the build vectors, filling the new operands into STMT's cache. */
432 static inline void
433 finalize_ssa_stmt_operands (struct function *fn, gimple *stmt)
435 finalize_ssa_defs (fn, stmt);
436 finalize_ssa_uses (fn, stmt);
437 cleanup_build_arrays ();
441 /* Start the process of building up operand vectors. */
443 static inline void
444 start_ssa_stmt_operands (void)
446 gcc_assert (build_uses.length () == 0);
447 gcc_assert (build_vuse == NULL_TREE);
448 gcc_assert (build_vdef == NULL_TREE);
452 /* Add USE_P to the list of pointers to operands. */
454 static inline void
455 append_use (tree *use_p)
457 build_uses.safe_push (use_p);
461 /* Add VAR to the set of variables that require a VDEF operator. */
463 static inline void
464 append_vdef (tree var)
466 gcc_assert ((build_vdef == NULL_TREE
467 || build_vdef == var)
468 && (build_vuse == NULL_TREE
469 || build_vuse == var));
471 build_vdef = var;
472 build_vuse = var;
476 /* Add VAR to the set of variables that require a VUSE operator. */
478 static inline void
479 append_vuse (tree var)
481 gcc_assert (build_vuse == NULL_TREE
482 || build_vuse == var);
484 build_vuse = var;
487 /* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */
489 static void
490 add_virtual_operand (struct function *fn,
491 gimple *stmt ATTRIBUTE_UNUSED, int flags)
493 /* Add virtual operands to the stmt, unless the caller has specifically
494 requested not to do that (used when adding operands inside an
495 ADDR_EXPR expression). */
496 if (flags & opf_no_vops)
497 return;
499 gcc_assert (!is_gimple_debug (stmt));
501 if (flags & opf_def)
502 append_vdef (gimple_vop (fn));
503 else
504 append_vuse (gimple_vop (fn));
508 /* Add *VAR_P to the appropriate operand array for statement STMT.
509 FLAGS is as in get_expr_operands. If *VAR_P is a GIMPLE register,
510 it will be added to the statement's real operands, otherwise it is
511 added to virtual operands. */
513 static void
514 add_stmt_operand (struct function *fn, tree *var_p, gimple *stmt, int flags)
516 tree var = *var_p;
518 gcc_assert (SSA_VAR_P (*var_p));
520 if (is_gimple_reg (var))
522 /* The variable is a GIMPLE register. Add it to real operands. */
523 if (flags & opf_def)
525 else
526 append_use (var_p);
527 if (DECL_P (*var_p))
528 fn->gimple_df->ssa_renaming_needed = 1;
530 else
532 /* Mark statements with volatile operands. */
533 if (!(flags & opf_no_vops)
534 && TREE_THIS_VOLATILE (var))
535 gimple_set_has_volatile_ops (stmt, true);
537 /* The variable is a memory access. Add virtual operands. */
538 add_virtual_operand (fn, stmt, flags);
542 /* Mark the base address of REF as having its address taken.
543 REF may be a single variable whose address has been taken or any
544 other valid GIMPLE memory reference (structure reference, array,
545 etc). */
547 static void
548 mark_address_taken (tree ref)
550 tree var;
552 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
553 as the only thing we take the address of. If VAR is a structure,
554 taking the address of a field means that the whole structure may
555 be referenced using pointer arithmetic. See PR 21407 and the
556 ensuing mailing list discussion. */
557 var = get_base_address (ref);
558 if (var)
560 if (DECL_P (var))
561 TREE_ADDRESSABLE (var) = 1;
562 else if (TREE_CODE (var) == MEM_REF
563 && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
564 && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
565 TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
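/* Illustration, not in the original file: for a statement such as
   p_1 = &s.f, mark_address_taken is called on the COMPONENT_REF s.f;
   get_base_address returns the VAR_DECL for 's', so all of 's' is
   marked TREE_ADDRESSABLE rather than just the field 'f', for the
   PR 21407 reasons described above.  */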
570 /* A subroutine of get_expr_operands to handle MEM_REF.
572 STMT is the statement being processed, EXPR is the MEM_REF
573 that got us here.
575 FLAGS is as in get_expr_operands. */
577 static void
578 get_mem_ref_operands (struct function *fn,
579 gimple *stmt, tree expr, int flags)
581 tree *pptr = &TREE_OPERAND (expr, 0);
583 if (!(flags & opf_no_vops)
584 && TREE_THIS_VOLATILE (expr))
585 gimple_set_has_volatile_ops (stmt, true);
587 /* Add the VOP. */
588 add_virtual_operand (fn, stmt, flags);
590 /* If requested, add a USE operand for the base pointer. */
591 get_expr_operands (fn, stmt, pptr,
592 opf_non_addressable | opf_use
593 | (flags & (opf_no_vops|opf_not_non_addressable)));
597 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
599 static void
600 get_tmr_operands (struct function *fn, gimple *stmt, tree expr, int flags)
602 if (!(flags & opf_no_vops)
603 && TREE_THIS_VOLATILE (expr))
604 gimple_set_has_volatile_ops (stmt, true);
606 /* First record the real operands. */
607 get_expr_operands (fn, stmt,
608 &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
609 get_expr_operands (fn, stmt,
610 &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
611 get_expr_operands (fn, stmt,
612 &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));
614 add_virtual_operand (fn, stmt, flags);
618 /* If STMT is a call that may clobber globals and other symbols that
619 escape, add them to the VDEF/VUSE lists for it. */
621 static void
622 maybe_add_call_vops (struct function *fn, gcall *stmt)
624 int call_flags = gimple_call_flags (stmt);
626 /* If aliases have been computed already, add VDEF or VUSE
627 operands for all the symbols that have been found to be
628 call-clobbered. */
629 if (!(call_flags & ECF_NOVOPS))
631 /* A 'pure' or a 'const' function never call-clobbers anything. */
632 if (!(call_flags & (ECF_PURE | ECF_CONST)))
633 add_virtual_operand (fn, stmt, opf_def);
634 else if (!(call_flags & ECF_CONST))
635 add_virtual_operand (fn, stmt, opf_use);
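/* Illustration (sketch): a call to a 'const' function gets no virtual
   operands at all; a call to a 'pure' function may read but not write
   memory, so it gets only a VUSE; any other call without ECF_NOVOPS may
   also write memory and gets a VDEF, which carries the VUSE with it.  */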
640 /* Scan operands in the GIMPLE_ASM statement STMT. */
642 static void
643 get_asm_stmt_operands (struct function *fn, gasm *stmt)
645 size_t i, noutputs;
646 const char **oconstraints;
647 const char *constraint;
648 bool allows_mem, allows_reg, is_inout;
650 noutputs = gimple_asm_noutputs (stmt);
651 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
653 /* Gather all output operands. */
654 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
656 tree link = gimple_asm_output_op (stmt, i);
657 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
658 oconstraints[i] = constraint;
659 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
660 &allows_reg, &is_inout);
662 /* This should have been split in gimplify_asm_expr. */
663 gcc_assert (!allows_reg || !is_inout);
665 /* Memory operands are addressable. Note that STMT needs the
666 address of this operand. */
667 if (!allows_reg && allows_mem)
668 mark_address_taken (TREE_VALUE (link));
670 get_expr_operands (fn, stmt,
671 &TREE_VALUE (link), opf_def | opf_not_non_addressable);
674 /* Gather all input operands. */
675 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
677 tree link = gimple_asm_input_op (stmt, i);
678 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
679 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
680 &allows_mem, &allows_reg);
682 /* Memory operands are addressable. Note that STMT needs the
683 address of this operand. */
684 if (!allows_reg && allows_mem)
685 mark_address_taken (TREE_VALUE (link));
687 get_expr_operands (fn, stmt, &TREE_VALUE (link), opf_not_non_addressable);
690 /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
691 if (gimple_asm_clobbers_memory_p (stmt))
692 add_virtual_operand (fn, stmt, opf_def);
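/* Illustration (sketch): for  asm ("" : "=m" (x));  the "m" output
   constraint does not allow a register, so 'x' is marked addressable
   and parsed with opf_def, giving the statement a VDEF; an asm with a
   "memory" clobber gets a VDEF even without explicit memory operands.  */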
696 /* Recursively scan the expression pointed to by EXPR_P in statement
697 STMT. FLAGS is one of the OPF_* constants modifying how to
698 interpret the operands found. */
700 static void
701 get_expr_operands (struct function *fn, gimple *stmt, tree *expr_p, int flags)
703 enum tree_code code;
704 enum tree_code_class codeclass;
705 tree expr = *expr_p;
706 int uflags = opf_use;
708 if (expr == NULL)
709 return;
711 if (is_gimple_debug (stmt))
712 uflags |= (flags & opf_no_vops);
714 code = TREE_CODE (expr);
715 codeclass = TREE_CODE_CLASS (code);
717 switch (code)
719 case ADDR_EXPR:
720 /* Taking the address of a variable does not represent a
721 reference to it, but the fact that the statement takes its
722 address will be of interest to some passes (e.g. alias
723 resolution). */
724 if ((!(flags & opf_non_addressable)
725 || (flags & opf_not_non_addressable))
726 && !is_gimple_debug (stmt))
727 mark_address_taken (TREE_OPERAND (expr, 0));
729 /* Otherwise, there may be variables referenced inside but there
730 should be no VUSEs created, since the referenced objects are
731 not really accessed. The only operands that we should find
732 here are ARRAY_REF indices which will always be real operands
733 (GIMPLE does not allow non-registers as array indices). */
734 flags |= opf_no_vops;
735 get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0),
736 flags | opf_not_non_addressable | opf_address_taken);
737 return;
739 case SSA_NAME:
740 case VAR_DECL:
741 case PARM_DECL:
742 case RESULT_DECL:
743 if (!(flags & opf_address_taken))
744 add_stmt_operand (fn, expr_p, stmt, flags);
745 return;
747 case DEBUG_EXPR_DECL:
748 gcc_assert (gimple_debug_bind_p (stmt));
749 return;
751 case MEM_REF:
752 get_mem_ref_operands (fn, stmt, expr, flags);
753 return;
755 case TARGET_MEM_REF:
756 get_tmr_operands (fn, stmt, expr, flags);
757 return;
759 case ARRAY_REF:
760 case ARRAY_RANGE_REF:
761 case COMPONENT_REF:
762 case REALPART_EXPR:
763 case IMAGPART_EXPR:
765 if (!(flags & opf_no_vops)
766 && TREE_THIS_VOLATILE (expr))
767 gimple_set_has_volatile_ops (stmt, true);
769 get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
771 if (code == COMPONENT_REF)
773 if (!(flags & opf_no_vops)
774 && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
775 gimple_set_has_volatile_ops (stmt, true);
776 get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
778 else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
780 get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
781 get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
782 get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 3), uflags);
785 return;
788 case WITH_SIZE_EXPR:
789 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
790 and an rvalue reference to its second argument. */
791 get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
792 get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
793 return;
795 case COND_EXPR:
796 case VEC_COND_EXPR:
797 case VEC_PERM_EXPR:
798 get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), uflags);
799 get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
800 get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
801 return;
803 case CONSTRUCTOR:
805 /* General aggregate CONSTRUCTORs have been decomposed, but they
806 are still in use as the COMPLEX_EXPR equivalent for vectors. */
807 constructor_elt *ce;
808 unsigned HOST_WIDE_INT idx;
810 /* A volatile constructor is actually TREE_CLOBBER_P; transfer the
811 volatility to the statement. We test THIS_VOLATILE rather than
812 TREE_CLOBBER_P here, mirroring the other uses of THIS_VOLATILE in this file. */
813 if (!(flags & opf_no_vops)
814 && TREE_THIS_VOLATILE (expr))
815 gimple_set_has_volatile_ops (stmt, true);
817 for (idx = 0;
818 vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
819 idx++)
820 get_expr_operands (fn, stmt, &ce->value, uflags);
822 return;
825 case BIT_FIELD_REF:
826 if (!(flags & opf_no_vops)
827 && TREE_THIS_VOLATILE (expr))
828 gimple_set_has_volatile_ops (stmt, true);
829 /* FALLTHRU */
831 case VIEW_CONVERT_EXPR:
832 do_unary:
833 get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
834 return;
836 case COMPOUND_EXPR:
837 case OBJ_TYPE_REF:
838 case ASSERT_EXPR:
839 do_binary:
841 get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
842 get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
843 return;
846 case DOT_PROD_EXPR:
847 case SAD_EXPR:
848 case REALIGN_LOAD_EXPR:
849 case WIDEN_MULT_PLUS_EXPR:
850 case WIDEN_MULT_MINUS_EXPR:
851 case FMA_EXPR:
853 get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
854 get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
855 get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), flags);
856 return;
859 case FUNCTION_DECL:
860 case LABEL_DECL:
861 case CONST_DECL:
862 case CASE_LABEL_EXPR:
863 /* Expressions that make no memory references. */
864 return;
866 default:
867 if (codeclass == tcc_unary)
868 goto do_unary;
869 if (codeclass == tcc_binary || codeclass == tcc_comparison)
870 goto do_binary;
871 if (codeclass == tcc_constant || codeclass == tcc_type)
872 return;
875 /* If we get here, something has gone wrong. */
876 if (flag_checking)
878 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
879 debug_tree (expr);
880 fputs ("\n", stderr);
881 gcc_unreachable ();
886 /* Parse STMT looking for operands. When finished, the various
887 build_* operand vectors will have potential operands in them. */
889 static void
890 parse_ssa_operands (struct function *fn, gimple *stmt)
892 enum gimple_code code = gimple_code (stmt);
893 size_t i, n, start = 0;
895 switch (code)
897 case GIMPLE_ASM:
898 get_asm_stmt_operands (fn, as_a <gasm *> (stmt));
899 break;
901 case GIMPLE_TRANSACTION:
902 /* The start of a transaction is a memory barrier. */
903 add_virtual_operand (fn, stmt, opf_def | opf_use);
904 break;
906 case GIMPLE_DEBUG:
907 if (gimple_debug_bind_p (stmt)
908 && gimple_debug_bind_has_value_p (stmt))
909 get_expr_operands (fn, stmt, gimple_debug_bind_get_value_ptr (stmt),
910 opf_use | opf_no_vops);
911 break;
913 case GIMPLE_RETURN:
914 append_vuse (gimple_vop (fn));
915 goto do_default;
917 case GIMPLE_CALL:
918 /* Add call-clobbered operands, if needed. */
919 maybe_add_call_vops (fn, as_a <gcall *> (stmt));
920 /* FALLTHRU */
922 case GIMPLE_ASSIGN:
923 get_expr_operands (fn, stmt, gimple_op_ptr (stmt, 0), opf_def);
924 start = 1;
925 /* FALLTHRU */
927 default:
928 do_default:
929 n = gimple_num_ops (stmt);
930 for (i = start; i < n; i++)
931 get_expr_operands (fn, stmt, gimple_op_ptr (stmt, i), opf_use);
932 break;
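/* Illustration (sketch): for  a_1 = b_2 + c_3  the code above records
   real USEs of 'b_2' and 'c_3' and no virtual operands (the definition
   of 'a_1' lives in the stmt itself); for  *p_4 = q_5  it records real
   USEs of 'p_4' and 'q_5' plus a VDEF/VUSE of the single .MEM virtual
   operand.  */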
937 /* Create an operands cache for STMT. */
939 static void
940 build_ssa_operands (struct function *fn, gimple *stmt)
942 /* Initially assume that the statement has no volatile operands. */
943 gimple_set_has_volatile_ops (stmt, false);
945 start_ssa_stmt_operands ();
946 parse_ssa_operands (fn, stmt);
947 finalize_ssa_stmt_operands (fn, stmt);
950 /* Verifies SSA statement operands. */
952 DEBUG_FUNCTION bool
953 verify_ssa_operands (struct function *fn, gimple *stmt)
955 use_operand_p use_p;
956 def_operand_p def_p;
957 ssa_op_iter iter;
958 unsigned i;
959 tree def;
960 bool volatile_p = gimple_has_volatile_ops (stmt);
962 /* build_ssa_operands w/o finalizing them. */
963 gimple_set_has_volatile_ops (stmt, false);
964 start_ssa_stmt_operands ();
965 parse_ssa_operands (fn, stmt);
967 /* Now verify the built operands are the same as present in STMT. */
968 def = gimple_vdef (stmt);
969 if (def
970 && TREE_CODE (def) == SSA_NAME)
971 def = SSA_NAME_VAR (def);
972 if (build_vdef != def)
974 error ("virtual definition of statement not up-to-date");
975 return true;
977 if (gimple_vdef (stmt)
978 && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
979 || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
981 error ("virtual def operand missing for stmt");
982 return true;
985 tree use = gimple_vuse (stmt);
986 if (use
987 && TREE_CODE (use) == SSA_NAME)
988 use = SSA_NAME_VAR (use);
989 if (build_vuse != use)
991 error ("virtual use of statement not up-to-date");
992 return true;
994 if (gimple_vuse (stmt)
995 && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
996 || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
998 error ("virtual use operand missing for stmt");
999 return true;
1002 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
1004 tree *op;
1005 FOR_EACH_VEC_ELT (build_uses, i, op)
1007 if (use_p->use == op)
1009 build_uses[i] = NULL;
1010 break;
1013 if (i == build_uses.length ())
1015 error ("excess use operand for stmt");
1016 debug_generic_expr (USE_FROM_PTR (use_p));
1017 return true;
1021 tree *op;
1022 FOR_EACH_VEC_ELT (build_uses, i, op)
1023 if (op != NULL)
1025 error ("use operand missing for stmt");
1026 debug_generic_expr (*op);
1027 return true;
1030 if (gimple_has_volatile_ops (stmt) != volatile_p)
1032 error ("stmt volatile flag not up-to-date");
1033 return true;
1036 cleanup_build_arrays ();
1037 return false;
1041 /* Releases the operands of STMT back to their freelists, and clears
1042 the stmt operand lists. */
1044 void
1045 free_stmt_operands (struct function *fn, gimple *stmt)
1047 use_optype_p uses = gimple_use_ops (stmt), last_use;
1049 if (uses)
1051 for (last_use = uses; last_use->next; last_use = last_use->next)
1052 delink_imm_use (USE_OP_PTR (last_use));
1053 delink_imm_use (USE_OP_PTR (last_use));
1054 last_use->next = gimple_ssa_operands (fn)->free_uses;
1055 gimple_ssa_operands (fn)->free_uses = uses;
1056 gimple_set_use_ops (stmt, NULL);
1059 if (gimple_has_mem_ops (stmt))
1061 gimple_set_vuse (stmt, NULL_TREE);
1062 gimple_set_vdef (stmt, NULL_TREE);
1067 /* Get the operands of statement STMT. */
1069 void
1070 update_stmt_operands (struct function *fn, gimple *stmt)
1072 /* If update_stmt_operands is called before SSA is initialized, do
1073 nothing. */
1074 if (!ssa_operands_active (fn))
1075 return;
1077 timevar_push (TV_TREE_OPS);
1079 gcc_assert (gimple_modified_p (stmt));
1080 build_ssa_operands (fn, stmt);
1081 gimple_set_modified (stmt, false);
1083 timevar_pop (TV_TREE_OPS);
1087 /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is made
1088 to test the validity of the swap operation. */
1090 void
1091 swap_ssa_operands (gimple *stmt, tree *exp0, tree *exp1)
1093 tree op0, op1;
1094 op0 = *exp0;
1095 op1 = *exp1;
1097 if (op0 != op1)
1099 /* Attempt to preserve the relative positions of these two operands in
1100 their respective immediate use lists by adjusting their use pointer
1101 to point to the new operand position. */
1102 use_optype_p use0, use1, ptr;
1103 use0 = use1 = NULL;
1105 /* Find the 2 operands in the cache, if they are there. */
1106 for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
1107 if (USE_OP_PTR (ptr)->use == exp0)
1109 use0 = ptr;
1110 break;
1113 for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
1114 if (USE_OP_PTR (ptr)->use == exp1)
1116 use1 = ptr;
1117 break;
1120 /* And adjust their location to point to the new position of the
1121 operand. */
1122 if (use0)
1123 USE_OP_PTR (use0)->use = exp1;
1124 if (use1)
1125 USE_OP_PTR (use1)->use = exp0;
1127 /* Now swap the data. */
1128 *exp0 = op1;
1129 *exp1 = op0;
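/* Typical use, shown here only as a sketch: canonicalizing a
   commutative GIMPLE_ASSIGN by swapping its two rhs operands while
   keeping the immediate-use lists consistent:

       swap_ssa_operands (stmt,
                          gimple_assign_rhs1_ptr (stmt),
                          gimple_assign_rhs2_ptr (stmt));  */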
1134 /* Scan the immediate_use list for VAR making sure it is linked properly.
1135 Return TRUE if there is a problem and emit an error message to F. */
1137 DEBUG_FUNCTION bool
1138 verify_imm_links (FILE *f, tree var)
1140 use_operand_p ptr, prev, list;
1141 int count;
1143 gcc_assert (TREE_CODE (var) == SSA_NAME);
1145 list = &(SSA_NAME_IMM_USE_NODE (var));
1146 gcc_assert (list->use == NULL);
1148 if (list->prev == NULL)
1150 gcc_assert (list->next == NULL);
1151 return false;
1154 prev = list;
1155 count = 0;
1156 for (ptr = list->next; ptr != list; )
1158 if (prev != ptr->prev)
1159 goto error;
1161 if (ptr->use == NULL)
1162 goto error; /* 2 roots, or SAFE guard node. */
1163 else if (*(ptr->use) != var)
1164 goto error;
1166 prev = ptr;
1167 ptr = ptr->next;
1169 /* Avoid infinite loops. 50,000,000 uses probably indicates a
1170 problem. */
1171 if (count++ > 50000000)
1172 goto error;
1175 /* Verify list in the other direction. */
1176 prev = list;
1177 for (ptr = list->prev; ptr != list; )
1179 if (prev != ptr->next)
1180 goto error;
1181 prev = ptr;
1182 ptr = ptr->prev;
1183 if (count-- < 0)
1184 goto error;
1187 if (count != 0)
1188 goto error;
1190 return false;
1192 error:
1193 if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
1195 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
1196 print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
1198 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
1199 (void *)ptr->use);
1200 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
1201 fprintf (f, "\n");
1202 return true;
1206 /* Dump all the immediate uses of VAR to FILE. */
1208 void
1209 dump_immediate_uses_for (FILE *file, tree var)
1211 imm_use_iterator iter;
1212 use_operand_p use_p;
1214 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
1216 print_generic_expr (file, var, TDF_SLIM);
1217 fprintf (file, " : -->");
1218 if (has_zero_uses (var))
1219 fprintf (file, " no uses.\n");
1220 else
1221 if (has_single_use (var))
1222 fprintf (file, " single use.\n");
1223 else
1224 fprintf (file, "%d uses.\n", num_imm_uses (var));
1226 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
1228 if (use_p->loc.stmt == NULL && use_p->use == NULL)
1229 fprintf (file, "***end of stmt iterator marker***\n");
1230 else
1231 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
1232 print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
1233 else
1234 print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
1236 fprintf (file, "\n");
1240 /* Dump all the immediate uses to FILE. */
1242 void
1243 dump_immediate_uses (FILE *file)
1245 tree var;
1246 unsigned int x;
1248 fprintf (file, "Immediate_uses: \n\n");
1249 for (x = 1; x < num_ssa_names; x++)
1251 var = ssa_name (x);
1252 if (!var)
1253 continue;
1254 dump_immediate_uses_for (file, var);
1259 /* Dump def-use edges on stderr. */
1261 DEBUG_FUNCTION void
1262 debug_immediate_uses (void)
1264 dump_immediate_uses (stderr);
1268 /* Dump def-use edges for VAR on stderr. */
1270 DEBUG_FUNCTION void
1271 debug_immediate_uses_for (tree var)
1273 dump_immediate_uses_for (stderr, var);
1277 /* Unlink STMT's virtual definition from the IL by propagating its use. */
1279 void
1280 unlink_stmt_vdef (gimple *stmt)
1282 use_operand_p use_p;
1283 imm_use_iterator iter;
1284 gimple *use_stmt;
1285 tree vdef = gimple_vdef (stmt);
1286 tree vuse = gimple_vuse (stmt);
1288 if (!vdef
1289 || TREE_CODE (vdef) != SSA_NAME)
1290 return;
1292 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
1294 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1295 SET_USE (use_p, vuse);
1298 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
1299 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
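/* Typical use, shown here only as a sketch: removing a dead store at
   gimple_stmt_iterator GSI without breaking the virtual use-def chain:

       unlink_stmt_vdef (stmt);
       gsi_remove (&gsi, true);
       release_defs (stmt);

   gsi_remove and release_defs are assumed to be available from the
   usual GIMPLE headers.  */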
1302 /* Return true if the var whose chain of uses starts at HEAD has a
1303 single nondebug use. Set USE_P and STMT to that single nondebug
1304 use, if so, or to NULL otherwise. */
1305 bool
1306 single_imm_use_1 (const ssa_use_operand_t *head,
1307 use_operand_p *use_p, gimple **stmt)
1309 ssa_use_operand_t *ptr, *single_use = 0;
1311 for (ptr = head->next; ptr != head; ptr = ptr->next)
1312 if (USE_STMT(ptr) && !is_gimple_debug (USE_STMT (ptr)))
1314 if (single_use)
1316 single_use = NULL;
1317 break;
1319 single_use = ptr;
1322 if (use_p)
1323 *use_p = single_use;
1325 if (stmt)
1326 *stmt = single_use ? single_use->loc.stmt : NULL;
1328 return single_use;