/* Inline functions for tree-flow.h
   Copyright (C) 2001, 2003, 2005, 2006, 2007, 2008 Free Software
   Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef _TREE_FLOW_INLINE_H
#define _TREE_FLOW_INLINE_H 1

/* Inline functions for manipulating various data structures defined in
   tree-flow.h.  See tree-flow.h for documentation.  */
/* Return true when gimple SSA form was built.
   gimple_in_ssa_p is queried by the gimplifier in various early stages
   before the SSA infrastructure is initialized, so check for the presence
   of the data structures first.  */
static inline bool
gimple_in_ssa_p (const struct function *fun)
{
  return fun && fun->gimple_df && fun->gimple_df->in_ssa_p;
}
/* 'true' after aliases have been computed (see compute_may_aliases).  */
static inline bool
gimple_aliases_computed_p (const struct function *fun)
{
  gcc_assert (fun && fun->gimple_df);
  return fun->gimple_df->aliases_computed_p;
}

/* Addressable variables in the function.  If bit I is set, then
   REFERENCED_VARS (I) has had its address taken.  Note that
   CALL_CLOBBERED_VARS and ADDRESSABLE_VARS are not related.  An
   addressable variable is not necessarily call-clobbered (e.g., a
   local addressable whose address does not escape) and not all
   call-clobbered variables are addressable (e.g., a local static
   variable).  */
static inline bitmap
gimple_addressable_vars (const struct function *fun)
{
  gcc_assert (fun && fun->gimple_df);
  return fun->gimple_df->addressable_vars;
}

/* Call clobbered variables in the function.  If bit I is set, then
   REFERENCED_VARS (I) is call-clobbered.  */
static inline bitmap
gimple_call_clobbered_vars (const struct function *fun)
{
  gcc_assert (fun && fun->gimple_df);
  return fun->gimple_df->call_clobbered_vars;
}

/* Call-used variables in the function.  If bit I is set, then
   REFERENCED_VARS (I) is call-used at pure function call-sites.  */
static inline bitmap
gimple_call_used_vars (const struct function *fun)
{
  gcc_assert (fun && fun->gimple_df);
  return fun->gimple_df->call_used_vars;
}
/* Array of all variables referenced in the function.  */
static inline htab_t
gimple_referenced_vars (const struct function *fun)
{
  if (!fun->gimple_df)
    return NULL;
  return fun->gimple_df->referenced_vars;
}

/* Artificial variable used to model the effects of function calls.  */
static inline tree
gimple_global_var (const struct function *fun)
{
  gcc_assert (fun && fun->gimple_df);
  return fun->gimple_df->global_var;
}

/* Artificial variable used to model the effects of nonlocal
   variables.  */
static inline tree
gimple_nonlocal_all (const struct function *fun)
{
  gcc_assert (fun && fun->gimple_df);
  return fun->gimple_df->nonlocal_all;
}
/* Initialize the hashtable iterator HTI to point to hashtable TABLE.  */

static inline void *
first_htab_element (htab_iterator *hti, htab_t table)
{
  hti->htab = table;
  hti->slot = table->entries;
  hti->limit = hti->slot + htab_size (table);
  do
    {
      PTR x = *(hti->slot);
      if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
        break;
    } while (++(hti->slot) < hti->limit);

  if (hti->slot < hti->limit)
    return *(hti->slot);
  return NULL;
}

/* Return true if the hashtable iterator HTI has reached the end of the
   table; otherwise there is still a non-empty/deleted slot to visit.  */

static inline bool
end_htab_p (const htab_iterator *hti)
{
  if (hti->slot >= hti->limit)
    return true;
  return false;
}

/* Advance the hashtable iterator pointed to by HTI to the next element of the
   hashtable.  */

static inline void *
next_htab_element (htab_iterator *hti)
{
  while (++(hti->slot) < hti->limit)
    {
      PTR x = *(hti->slot);
      if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
        return x;
    }
  return NULL;
}
/* Initialize ITER to point to the first referenced variable in the
   referenced_vars hashtable, and return that variable.  */

static inline tree
first_referenced_var (referenced_var_iterator *iter)
{
  return (tree) first_htab_element (&iter->hti,
                                    gimple_referenced_vars (cfun));
}

/* Return true if we have hit the end of the referenced variables ITER is
   iterating through.  */

static inline bool
end_referenced_vars_p (const referenced_var_iterator *iter)
{
  return end_htab_p (&iter->hti);
}

/* Make ITER point to the next referenced_var in the referenced_var hashtable,
   and return that variable.  */

static inline tree
next_referenced_var (referenced_var_iterator *iter)
{
  return (tree) next_htab_element (&iter->hti);
}

/* Fill up VEC with the variables in the referenced vars hashtable.  */

static inline void
fill_referenced_var_vec (VEC (tree, heap) **vec)
{
  referenced_var_iterator rvi;
  tree var;
  *vec = NULL;
  FOR_EACH_REFERENCED_VAR (var, rvi)
    VEC_safe_push (tree, heap, *vec, var);
}
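
/* Editor's note: an illustrative sketch, not part of the original header.
   The three routines above are the building blocks of the
   FOR_EACH_REFERENCED_VAR macro used in fill_referenced_var_vec; a walk
   over every referenced variable therefore looks roughly like:

     referenced_var_iterator rvi;
     tree var;

     for (var = first_referenced_var (&rvi);
          !end_referenced_vars_p (&rvi);
          var = next_referenced_var (&rvi))
       {
         ... process VAR ...
       }
*/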
/* Return the variable annotation for T, which must be a _DECL node.
   Return NULL if the variable annotation doesn't already exist.  */
static inline var_ann_t
var_ann (const_tree t)
{
  var_ann_t ann;

  if (!t->base.ann)
    return NULL;
  ann = (var_ann_t) t->base.ann;

  gcc_assert (ann->common.type == VAR_ANN);

  return ann;
}

/* Return the variable annotation for T, which must be a _DECL node.
   Create the variable annotation if it doesn't exist.  */
static inline var_ann_t
get_var_ann (tree var)
{
  var_ann_t ann = var_ann (var);
  return (ann) ? ann : create_var_ann (var);
}

/* Return the function annotation for T, which must be a FUNCTION_DECL node.
   Return NULL if the function annotation doesn't already exist.  */
static inline function_ann_t
function_ann (const_tree t)
{
  gcc_assert (t);
  gcc_assert (TREE_CODE (t) == FUNCTION_DECL);
  gcc_assert (!t->base.ann
              || t->base.ann->common.type == FUNCTION_ANN);

  return (function_ann_t) t->base.ann;
}

/* Return the function annotation for T, which must be a FUNCTION_DECL node.
   Create the function annotation if it doesn't exist.  */
static inline function_ann_t
get_function_ann (tree var)
{
  function_ann_t ann = function_ann (var);
  gcc_assert (!var->base.ann || var->base.ann->common.type == FUNCTION_ANN);
  return (ann) ? ann : create_function_ann (var);
}
/* Get the number of the next statement uid to be allocated.  */
static inline unsigned int
gimple_stmt_max_uid (struct function *fn)
{
  return fn->last_stmt_uid;
}

/* Set the number of the next statement uid to be allocated.  */
static inline void
set_gimple_stmt_max_uid (struct function *fn, unsigned int maxid)
{
  fn->last_stmt_uid = maxid;
}

/* Return the next statement uid to be allocated and bump the counter.  */
static inline unsigned int
inc_gimple_stmt_max_uid (struct function *fn)
{
  return fn->last_stmt_uid++;
}

/* Return the annotation type for annotation ANN.  */
static inline enum tree_ann_type
ann_type (tree_ann_t ann)
{
  return ann->common.type;
}
/* Return the may_aliases bitmap for variable VAR, or NULL if it has
   no may aliases.  */
static inline bitmap
may_aliases (const_tree var)
{
  return MTAG_ALIASES (var);
}

/* Return the line number for STMT, or return -1 if we have no line
   number information for it.  */
static inline int
get_lineno (const_gimple stmt)
{
  location_t loc;

  if (!stmt)
    return -1;

  loc = gimple_location (stmt);
  if (loc == UNKNOWN_LOCATION)
    return -1;

  return LOCATION_LINE (loc);
}
/* Delink an immediate_uses node from its chain.  */
static inline void
delink_imm_use (ssa_use_operand_t *linknode)
{
  /* Return if this node is not in a list.  */
  if (linknode->prev == NULL)
    return;

  linknode->prev->next = linknode->next;
  linknode->next->prev = linknode->prev;
  linknode->prev = NULL;
  linknode->next = NULL;
}

/* Link ssa_imm_use node LINKNODE into the chain for LIST.  */
static inline void
link_imm_use_to_list (ssa_use_operand_t *linknode, ssa_use_operand_t *list)
{
  /* Link the new node at the head of the list.  If we are in the process of
     traversing the list, we won't visit any new nodes added to it.  */
  linknode->prev = list;
  linknode->next = list->next;
  list->next->prev = linknode;
  list->next = linknode;
}

/* Link ssa_imm_use node LINKNODE into the chain for DEF.  */
static inline void
link_imm_use (ssa_use_operand_t *linknode, tree def)
{
  ssa_use_operand_t *root;

  if (!def || TREE_CODE (def) != SSA_NAME)
    linknode->prev = NULL;
  else
    {
      root = &(SSA_NAME_IMM_USE_NODE (def));
#ifdef ENABLE_CHECKING
      if (linknode->use)
        gcc_assert (*(linknode->use) == def);
#endif
      link_imm_use_to_list (linknode, root);
    }
}

/* Set the value of a use pointed to by USE to VAL.  */
static inline void
set_ssa_use_from_ptr (use_operand_p use, tree val)
{
  delink_imm_use (use);
  *(use->use) = val;
  link_imm_use (use, val);
}
/* Link ssa_imm_use node LINKNODE into the chain for DEF, with use occurring
   in STMT.  */
static inline void
link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, gimple stmt)
{
  if (stmt)
    link_imm_use (linknode, def);
  else
    link_imm_use (linknode, NULL);
  linknode->loc.stmt = stmt;
}

/* Relink a new node in place of an old node in the list.  */
static inline void
relink_imm_use (ssa_use_operand_t *node, ssa_use_operand_t *old)
{
  /* The new node had better be in the same list as the old one.  */
  gcc_assert (*(old->use) == *(node->use));
  node->prev = old->prev;
  node->next = old->next;
  if (old->prev)
    {
      old->prev->next = node;
      old->next->prev = node;
      /* Remove the old node from the list.  */
      old->prev = NULL;
    }
}

/* Relink ssa_imm_use node LINKNODE into the chain for OLD, with use occurring
   in STMT.  */
static inline void
relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old,
                     gimple stmt)
{
  if (stmt)
    relink_imm_use (linknode, old);
  else
    link_imm_use (linknode, NULL);
  linknode->loc.stmt = stmt;
}
/* Return true if IMM has reached the end of the immediate use list.  */
static inline bool
end_readonly_imm_use_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == imm->end_p);
}

/* Initialize iterator IMM to process the list for VAR.  */
static inline use_operand_p
first_readonly_imm_use (imm_use_iterator *imm, tree var)
{
  gcc_assert (TREE_CODE (var) == SSA_NAME);

  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
#ifdef ENABLE_CHECKING
  imm->iter_node.next = imm->imm_use->next;
#endif
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}

/* Bump IMM to the next use in the list.  */
static inline use_operand_p
next_readonly_imm_use (imm_use_iterator *imm)
{
  use_operand_p old = imm->imm_use;

#ifdef ENABLE_CHECKING
  /* If this assertion fails, it indicates the 'next' pointer has changed
     since the last bump.  This indicates that the list is being modified
     via stmt changes, or SET_USE, or somesuch thing, and you need to be
     using the SAFE version of the iterator.  */
  gcc_assert (imm->iter_node.next == old->next);
  imm->iter_node.next = old->next->next;
#endif

  imm->imm_use = old->next;
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}
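
/* Editor's note: an illustrative sketch, not part of the original header.
   The read-only iterator above supports traversals that only inspect the
   uses of an SSA name VAR and never modify the use list (the
   FOR_EACH_IMM_USE_FAST macro is a wrapper around this triple):

     imm_use_iterator iter;
     use_operand_p use_p;

     for (use_p = first_readonly_imm_use (&iter, var);
          !end_readonly_imm_use_p (&iter);
          use_p = next_readonly_imm_use (&iter))
       {
         ... inspect USE_STMT (use_p), but do not change the list ...
       }

   Traversals that delete or rewrite uses must use the statement-based
   iterator further below (first_imm_use_stmt and friends).  */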
/* Return true if VAR has no uses.  */
static inline bool
has_zero_uses (const_tree var)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
  /* The use list is empty when the root node points back to itself.  */
  return (ptr == ptr->next);
}

/* Return true if VAR has a single use.  */
static inline bool
has_single_use (const_tree var)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
  /* A single use means there is one item in the list.  */
  return (ptr != ptr->next && ptr == ptr->next->next);
}

/* If VAR has only a single immediate use, return true, and set USE_P and STMT
   to the use pointer and stmt of occurrence.  */
static inline bool
single_imm_use (const_tree var, use_operand_p *use_p, gimple *stmt)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
  if (ptr != ptr->next && ptr == ptr->next->next)
    {
      *use_p = ptr->next;
      *stmt = ptr->next->loc.stmt;
      return true;
    }
  *use_p = NULL_USE_OPERAND_P;
  *stmt = NULL;
  return false;
}
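
/* Editor's note: an illustrative sketch, not part of the original header.
   A typical caller combines the single-use test and the lookup in one call;
   NAME below stands for some SSA_NAME of interest:

     use_operand_p use_p;
     gimple use_stmt;

     if (single_imm_use (name, &use_p, &use_stmt))
       {
         ... NAME is used exactly once, namely in USE_STMT ...
       }
*/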
/* Return the number of immediate uses of VAR.  */
static inline unsigned int
num_imm_uses (const_tree var)
{
  const ssa_use_operand_t *const start = &(SSA_NAME_IMM_USE_NODE (var));
  const ssa_use_operand_t *ptr;
  unsigned int num = 0;

  for (ptr = start->next; ptr != start; ptr = ptr->next)
    num++;

  return num;
}

/* Return the tree pointed-to by USE.  */
static inline tree
get_use_from_ptr (use_operand_p use)
{
  return *(use->use);
}

/* Return the tree pointed-to by DEF.  */
static inline tree
get_def_from_ptr (def_operand_p def)
{
  return *def;
}
/* Return a use_operand_p pointer for argument I of PHI node GS.  */

static inline use_operand_p
gimple_phi_arg_imm_use_ptr (gimple gs, int i)
{
  return &gimple_phi_arg (gs, i)->imm_use;
}

/* Return the tree operand for argument I of PHI node GS.  */

static inline tree
gimple_phi_arg_def (gimple gs, size_t index)
{
  struct phi_arg_d *pd = gimple_phi_arg (gs, index);
  return get_use_from_ptr (&pd->imm_use);
}

/* Return a pointer to the tree operand for argument I of PHI node GS.  */

static inline tree *
gimple_phi_arg_def_ptr (gimple gs, size_t index)
{
  return &gimple_phi_arg (gs, index)->def;
}

/* Return the edge associated with argument I of phi node GS.  */

static inline edge
gimple_phi_arg_edge (gimple gs, size_t i)
{
  return EDGE_PRED (gimple_bb (gs), i);
}

/* Return the PHI nodes for basic block BB, or NULL if there are no
   PHI nodes.  */

static inline gimple_seq
phi_nodes (const_basic_block bb)
{
  gcc_assert (!(bb->flags & BB_RTL));
  if (!bb->il.gimple)
    return NULL;
  return bb->il.gimple->phi_nodes;
}
/* Set PHI nodes of a basic block BB to SEQ.  */

static inline void
set_phi_nodes (basic_block bb, gimple_seq seq)
{
  gimple_stmt_iterator i;

  gcc_assert (!(bb->flags & BB_RTL));
  bb->il.gimple->phi_nodes = seq;
  if (seq)
    for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
      gimple_set_bb (gsi_stmt (i), bb);
}

/* Return the phi argument which contains the specified use.  */

static inline int
phi_arg_index_from_use (use_operand_p use)
{
  struct phi_arg_d *element, *root;
  size_t index;
  gimple phi;

  /* Since the use is the first thing in a PHI argument element, we can
     calculate its index based on casting it to an argument, and performing
     pointer arithmetic.  */

  phi = USE_STMT (use);
  gcc_assert (gimple_code (phi) == GIMPLE_PHI);

  element = (struct phi_arg_d *)use;
  root = gimple_phi_arg (phi, 0);
  index = element - root;

#ifdef ENABLE_CHECKING
  /* Make sure the calculation doesn't have any leftover bytes.  If it does,
     then imm_use is likely not the first element in phi_arg_d.  */
  gcc_assert (
    (((char *)element - (char *)root) % sizeof (struct phi_arg_d)) == 0);
  gcc_assert (index < gimple_phi_capacity (phi));
#endif

  return index;
}
/* Mark VAR as used, so that it'll be preserved during rtl expansion.  */

static inline void
set_is_used (tree var)
{
  var_ann_t ann = get_var_ann (var);
  ann->used = 1;
}

/* Return true if T (assumed to be a DECL) is a global variable.  */

static inline bool
is_global_var (const_tree t)
{
  if (MTAG_P (t))
    return MTAG_GLOBAL (t);
  else
    return (TREE_STATIC (t) || DECL_EXTERNAL (t));
}

/* PHI nodes should contain only ssa_names and invariants.  A test
   for ssa_name is definitely simpler; don't let invalid contents
   slip in in the meantime.  */

static inline bool
phi_ssa_name_p (const_tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return true;
#ifdef ENABLE_CHECKING
  gcc_assert (is_gimple_min_invariant (t));
#endif
  return false;
}
/* Returns the loop of the statement STMT.  */

static inline struct loop *
loop_containing_stmt (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  if (!bb)
    return NULL;

  return bb->loop_father;
}

/* Return the memory partition tag associated with symbol SYM.  */

static inline tree
memory_partition (tree sym)
{
  tree tag;

  /* MPTs belong to their own partition.  */
  if (TREE_CODE (sym) == MEMORY_PARTITION_TAG)
    return sym;

  gcc_assert (!is_gimple_reg (sym));
  /* Autoparallelization moves statements from the original function (which has
     aliases computed) to the new one (which does not).  When rebuilding
     operands for the statement in the new function, we do not want to
     record the memory partition tags of the original function.  */
  if (!gimple_aliases_computed_p (cfun))
    return NULL_TREE;

  tag = get_var_ann (sym)->mpt;

#if defined ENABLE_CHECKING
  if (tag)
    gcc_assert (TREE_CODE (tag) == MEMORY_PARTITION_TAG);
#endif

  return tag;
}

/* Return true if NAME is a memory factoring SSA name (i.e., an SSA
   name for a memory partition).  */

static inline bool
factoring_name_p (const_tree name)
{
  return TREE_CODE (SSA_NAME_VAR (name)) == MEMORY_PARTITION_TAG;
}
/* Return true if VAR is used by function calls.  */
static inline bool
is_call_used (const_tree var)
{
  return (var_ann (var)->call_clobbered
          || bitmap_bit_p (gimple_call_used_vars (cfun), DECL_UID (var)));
}

/* Return true if VAR is clobbered by function calls.  */
static inline bool
is_call_clobbered (const_tree var)
{
  return var_ann (var)->call_clobbered;
}

/* Mark variable VAR as being clobbered by function calls.  */
static inline void
mark_call_clobbered (tree var, unsigned int escape_type)
{
  var_ann (var)->escape_mask |= escape_type;
  var_ann (var)->call_clobbered = true;
  bitmap_set_bit (gimple_call_clobbered_vars (cfun), DECL_UID (var));
}

/* Clear the call-clobbered attribute from variable VAR.  */
static inline void
clear_call_clobbered (tree var)
{
  var_ann_t ann = var_ann (var);
  ann->escape_mask = 0;
  if (MTAG_P (var))
    MTAG_GLOBAL (var) = 0;
  var_ann (var)->call_clobbered = false;
  bitmap_clear_bit (gimple_call_clobbered_vars (cfun), DECL_UID (var));
}
/* Return the common annotation for T.  Return NULL if the annotation
   doesn't already exist.  */
static inline tree_ann_common_t
tree_common_ann (const_tree t)
{
  /* Watch out for static variables with unshared annotations.  */
  if (DECL_P (t) && TREE_CODE (t) == VAR_DECL)
    return &var_ann (t)->common;
  return &t->base.ann->common;
}

/* Return a common annotation for T.  Create the common annotation if it
   doesn't exist.  */
static inline tree_ann_common_t
get_tree_common_ann (tree t)
{
  tree_ann_common_t ann = tree_common_ann (t);
  return (ann) ? ann : create_tree_common_ann (t);
}
/* ---------------------------------------------------------------------- */

/* The following set of routines are used to iterate over various types of
   SSA operands.  */

/* Return true if PTR is finished iterating.  */
static inline bool
op_iter_done (const ssa_op_iter *ptr)
{
  return ptr->done;
}

/* Get the next iterator use value for PTR.  */
static inline use_operand_p
op_iter_next_use (ssa_op_iter *ptr)
{
  use_operand_p use_p;
#ifdef ENABLE_CHECKING
  gcc_assert (ptr->iter_type == ssa_op_iter_use);
#endif
  if (ptr->uses)
    {
      use_p = USE_OP_PTR (ptr->uses);
      ptr->uses = ptr->uses->next;
      return use_p;
    }
  if (ptr->vuses)
    {
      use_p = VUSE_OP_PTR (ptr->vuses, ptr->vuse_index);
      if (++(ptr->vuse_index) >= VUSE_NUM (ptr->vuses))
        {
          ptr->vuse_index = 0;
          ptr->vuses = ptr->vuses->next;
        }
      return use_p;
    }
  if (ptr->mayuses)
    {
      use_p = VDEF_OP_PTR (ptr->mayuses, ptr->mayuse_index);
      if (++(ptr->mayuse_index) >= VDEF_NUM (ptr->mayuses))
        {
          ptr->mayuse_index = 0;
          ptr->mayuses = ptr->mayuses->next;
        }
      return use_p;
    }
  if (ptr->phi_i < ptr->num_phi)
    {
      return PHI_ARG_DEF_PTR (ptr->phi_stmt, (ptr->phi_i)++);
    }
  ptr->done = true;
  return NULL_USE_OPERAND_P;
}
/* Get the next iterator def value for PTR.  */
static inline def_operand_p
op_iter_next_def (ssa_op_iter *ptr)
{
  def_operand_p def_p;
#ifdef ENABLE_CHECKING
  gcc_assert (ptr->iter_type == ssa_op_iter_def);
#endif
  if (ptr->defs)
    {
      def_p = DEF_OP_PTR (ptr->defs);
      ptr->defs = ptr->defs->next;
      return def_p;
    }
  if (ptr->vdefs)
    {
      def_p = VDEF_RESULT_PTR (ptr->vdefs);
      ptr->vdefs = ptr->vdefs->next;
      return def_p;
    }
  ptr->done = true;
  return NULL_DEF_OPERAND_P;
}

/* Get the next iterator tree value for PTR.  */
static inline tree
op_iter_next_tree (ssa_op_iter *ptr)
{
  tree val;
#ifdef ENABLE_CHECKING
  gcc_assert (ptr->iter_type == ssa_op_iter_tree);
#endif
  if (ptr->uses)
    {
      val = USE_OP (ptr->uses);
      ptr->uses = ptr->uses->next;
      return val;
    }
  if (ptr->vuses)
    {
      val = VUSE_OP (ptr->vuses, ptr->vuse_index);
      if (++(ptr->vuse_index) >= VUSE_NUM (ptr->vuses))
        {
          ptr->vuse_index = 0;
          ptr->vuses = ptr->vuses->next;
        }
      return val;
    }
  if (ptr->mayuses)
    {
      val = VDEF_OP (ptr->mayuses, ptr->mayuse_index);
      if (++(ptr->mayuse_index) >= VDEF_NUM (ptr->mayuses))
        {
          ptr->mayuse_index = 0;
          ptr->mayuses = ptr->mayuses->next;
        }
      return val;
    }
  if (ptr->defs)
    {
      val = DEF_OP (ptr->defs);
      ptr->defs = ptr->defs->next;
      return val;
    }
  if (ptr->vdefs)
    {
      val = VDEF_RESULT (ptr->vdefs);
      ptr->vdefs = ptr->vdefs->next;
      return val;
    }

  ptr->done = true;
  return NULL_TREE;
}
/* This function clears the iterator PTR, and marks it done.  This is normally
   used to prevent compiler warnings about possibly uninitialized
   components.  */

static inline void
clear_and_done_ssa_iter (ssa_op_iter *ptr)
{
  ptr->defs = NULL;
  ptr->uses = NULL;
  ptr->vuses = NULL;
  ptr->vdefs = NULL;
  ptr->mayuses = NULL;
  ptr->iter_type = ssa_op_iter_none;
  ptr->phi_i = 0;
  ptr->num_phi = 0;
  ptr->phi_stmt = NULL;
  ptr->done = true;
  ptr->vuse_index = 0;
  ptr->mayuse_index = 0;
}
/* Initialize the iterator PTR to the operands in STMT requested by FLAGS.  */
static inline void
op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
{
  ptr->defs = (flags & SSA_OP_DEF) ? gimple_def_ops (stmt) : NULL;
  ptr->uses = (flags & SSA_OP_USE) ? gimple_use_ops (stmt) : NULL;
  ptr->vuses = (flags & SSA_OP_VUSE) ? gimple_vuse_ops (stmt) : NULL;
  ptr->vdefs = (flags & SSA_OP_VDEF) ? gimple_vdef_ops (stmt) : NULL;
  ptr->mayuses = (flags & SSA_OP_VMAYUSE) ? gimple_vdef_ops (stmt) : NULL;
  ptr->done = false;

  ptr->phi_i = 0;
  ptr->num_phi = 0;
  ptr->phi_stmt = NULL;
  ptr->vuse_index = 0;
  ptr->mayuse_index = 0;
}
/* Initialize iterator PTR to the use operands in STMT based on FLAGS.  Return
   the first use.  */
static inline use_operand_p
op_iter_init_use (ssa_op_iter *ptr, gimple stmt, int flags)
{
  gcc_assert ((flags & SSA_OP_ALL_DEFS) == 0);
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_use;
  return op_iter_next_use (ptr);
}

/* Initialize iterator PTR to the def operands in STMT based on FLAGS.  Return
   the first def.  */
static inline def_operand_p
op_iter_init_def (ssa_op_iter *ptr, gimple stmt, int flags)
{
  gcc_assert ((flags & SSA_OP_ALL_USES) == 0);
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_def;
  return op_iter_next_def (ptr);
}

/* Initialize iterator PTR to the operands in STMT based on FLAGS.  Return
   the first operand as a tree.  */
static inline tree
op_iter_init_tree (ssa_op_iter *ptr, gimple stmt, int flags)
{
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_tree;
  return op_iter_next_tree (ptr);
}
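
/* Editor's note: an illustrative sketch, not part of the original header.
   These initializers back the FOR_EACH_SSA_* operand macros used later in
   this file.  Counting the real uses of a statement, for example, is
   roughly:

     ssa_op_iter iter;
     tree op;
     unsigned count = 0;

     for (op = op_iter_init_tree (&iter, stmt, SSA_OP_USE);
          !op_iter_done (&iter);
          op = op_iter_next_tree (&iter))
       count++;

   which is the same pattern num_ssa_operands below expresses with
   FOR_EACH_SSA_TREE_OPERAND.  */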
/* Get the next iterator vdef value for PTR, returning the vdef values in
   USE and DEF.  */
static inline void
op_iter_next_vdef (vuse_vec_p *use, def_operand_p *def,
                   ssa_op_iter *ptr)
{
#ifdef ENABLE_CHECKING
  gcc_assert (ptr->iter_type == ssa_op_iter_vdef);
#endif
  if (ptr->mayuses)
    {
      *def = VDEF_RESULT_PTR (ptr->mayuses);
      *use = VDEF_VECT (ptr->mayuses);
      ptr->mayuses = ptr->mayuses->next;
      return;
    }

  *def = NULL_DEF_OPERAND_P;
  *use = NULL;
  ptr->done = true;
  return;
}

/* Get the next iterator mustdef value for PTR, returning the single use in
   USE and the def in DEF.  */
static inline void
op_iter_next_mustdef (use_operand_p *use, def_operand_p *def,
                      ssa_op_iter *ptr)
{
  vuse_vec_p vp;
  op_iter_next_vdef (&vp, def, ptr);
  if (vp != NULL)
    {
      gcc_assert (VUSE_VECT_NUM_ELEM (*vp) == 1);
      *use = VUSE_ELEMENT_PTR (*vp, 0);
    }
  else
    *use = NULL_USE_OPERAND_P;
}

/* Initialize iterator PTR to the VDEF operands in STMT.  Return the first
   operands in USE and DEF.  */
static inline void
op_iter_init_vdef (ssa_op_iter *ptr, gimple stmt, vuse_vec_p *use,
                   def_operand_p *def)
{
  gcc_assert (gimple_code (stmt) != GIMPLE_PHI);

  op_iter_init (ptr, stmt, SSA_OP_VMAYUSE);
  ptr->iter_type = ssa_op_iter_vdef;
  op_iter_next_vdef (use, def, ptr);
}
/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   return NULL.  */
static inline tree
single_ssa_tree_operand (gimple stmt, int flags)
{
  tree var;
  ssa_op_iter iter;

  var = op_iter_init_tree (&iter, stmt, flags);
  if (op_iter_done (&iter))
    return NULL_TREE;
  op_iter_next_tree (&iter);
  if (op_iter_done (&iter))
    return var;
  return NULL_TREE;
}

/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   return NULL.  */
static inline use_operand_p
single_ssa_use_operand (gimple stmt, int flags)
{
  use_operand_p var;
  ssa_op_iter iter;

  var = op_iter_init_use (&iter, stmt, flags);
  if (op_iter_done (&iter))
    return NULL_USE_OPERAND_P;
  op_iter_next_use (&iter);
  if (op_iter_done (&iter))
    return var;
  return NULL_USE_OPERAND_P;
}

/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   return NULL.  */
static inline def_operand_p
single_ssa_def_operand (gimple stmt, int flags)
{
  def_operand_p var;
  ssa_op_iter iter;

  var = op_iter_init_def (&iter, stmt, flags);
  if (op_iter_done (&iter))
    return NULL_DEF_OPERAND_P;
  op_iter_next_def (&iter);
  if (op_iter_done (&iter))
    return var;
  return NULL_DEF_OPERAND_P;
}

/* Return true if there are zero operands in STMT matching the type
   given in FLAGS.  */
static inline bool
zero_ssa_operands (gimple stmt, int flags)
{
  ssa_op_iter iter;

  op_iter_init_tree (&iter, stmt, flags);
  return op_iter_done (&iter);
}

/* Return the number of operands matching FLAGS in STMT.  */
static inline int
num_ssa_operands (gimple stmt, int flags)
{
  ssa_op_iter iter;
  tree t;
  int num = 0;

  FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, flags)
    num++;
  return num;
}
/* Delink all immediate_use information for STMT.  */
static inline void
delink_stmt_imm_use (gimple stmt)
{
  ssa_op_iter iter;
  use_operand_p use_p;

  if (ssa_operands_active ())
    FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
      delink_imm_use (use_p);
}

/* This routine will compare all the operands matching FLAGS in STMT1 to those
   in STMT2.  TRUE is returned if they are the same.  STMTs can be NULL.  */
static inline bool
compare_ssa_operands_equal (gimple stmt1, gimple stmt2, int flags)
{
  ssa_op_iter iter1, iter2;
  tree op1 = NULL_TREE;
  tree op2 = NULL_TREE;
  bool look1, look2;

  if (stmt1 == stmt2)
    return true;

  look1 = stmt1 != NULL;
  look2 = stmt2 != NULL;

  if (look1)
    {
      op1 = op_iter_init_tree (&iter1, stmt1, flags);
      if (!look2)
        return op_iter_done (&iter1);
    }
  else
    clear_and_done_ssa_iter (&iter1);

  if (look2)
    {
      op2 = op_iter_init_tree (&iter2, stmt2, flags);
      if (!look1)
        return op_iter_done (&iter2);
    }
  else
    clear_and_done_ssa_iter (&iter2);

  while (!op_iter_done (&iter1) && !op_iter_done (&iter2))
    {
      if (op1 != op2)
        return false;
      op1 = op_iter_next_tree (&iter1);
      op2 = op_iter_next_tree (&iter2);
    }

  return (op_iter_done (&iter1) && op_iter_done (&iter2));
}
/* If there is a single DEF in the PHI node which matches FLAG, return it.
   Otherwise return NULL_TREE.  */
static inline tree
single_phi_def (gimple stmt, int flags)
{
  tree def = PHI_RESULT (stmt);
  if ((flags & SSA_OP_DEF) && is_gimple_reg (def))
    return def;
  if ((flags & SSA_OP_VIRTUAL_DEFS) && !is_gimple_reg (def))
    return def;
  return NULL_TREE;
}

/* Initialize the iterator PTR for uses matching FLAGS in PHI.  FLAGS should
   be either SSA_OP_USES or SSA_OP_VIRTUAL_USES.  */
static inline use_operand_p
op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags)
{
  tree phi_def = gimple_phi_result (phi);
  int comp;

  clear_and_done_ssa_iter (ptr);
  ptr->done = false;

  gcc_assert ((flags & (SSA_OP_USE | SSA_OP_VIRTUAL_USES)) != 0);

  comp = (is_gimple_reg (phi_def) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

  /* If the PHI node doesn't have the operand type we care about,
     we're done.  */
  if ((flags & comp) == 0)
    {
      ptr->done = true;
      return NULL_USE_OPERAND_P;
    }

  ptr->phi_stmt = phi;
  ptr->num_phi = gimple_phi_num_args (phi);
  ptr->iter_type = ssa_op_iter_use;
  return op_iter_next_use (ptr);
}
/* Start an iterator for a PHI definition.  */

static inline def_operand_p
op_iter_init_phidef (ssa_op_iter *ptr, gimple phi, int flags)
{
  tree phi_def = PHI_RESULT (phi);
  int comp;

  clear_and_done_ssa_iter (ptr);
  ptr->done = false;

  gcc_assert ((flags & (SSA_OP_DEF | SSA_OP_VIRTUAL_DEFS)) != 0);

  comp = (is_gimple_reg (phi_def) ? SSA_OP_DEF : SSA_OP_VIRTUAL_DEFS);

  /* If the PHI node doesn't have the operand type we care about,
     we're done.  */
  if ((flags & comp) == 0)
    {
      ptr->done = true;
      return NULL_USE_OPERAND_P;
    }

  ptr->iter_type = ssa_op_iter_def;
  /* The first call to op_iter_next_def will terminate the iterator since
     all the fields are NULL.  Simply return the result here as the first and
     therefore only result.  */
  return PHI_RESULT_PTR (phi);
}
/* Return true if IMM has reached the end of the immediate use stmt list.  */

static inline bool
end_imm_use_stmt_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == imm->end_p);
}

/* Finish the traversal of an immediate use stmt list IMM by removing the
   placeholder node from the list.  */

static inline void
end_imm_use_stmt_traverse (imm_use_iterator *imm)
{
  delink_imm_use (&(imm->iter_node));
}

/* Immediate use traversal of uses within a stmt requires that all the
   uses on a stmt be sequentially listed.  This routine is used to build up
   this sequential list by adding USE_P to the end of the current list
   currently delimited by HEAD and LAST_P.  The new LAST_P value is
   returned.  */

static inline use_operand_p
move_use_after_head (use_operand_p use_p, use_operand_p head,
                     use_operand_p last_p)
{
  gcc_assert (USE_FROM_PTR (use_p) == USE_FROM_PTR (head));
  /* Skip head when we find it.  */
  if (use_p != head)
    {
      /* If use_p is already linked in after last_p, continue.  */
      if (last_p->next == use_p)
        last_p = use_p;
      else
        {
          /* Delink from current location, and link in at last_p.  */
          delink_imm_use (use_p);
          link_imm_use_to_list (use_p, last_p);
          last_p = use_p;
        }
    }
  return last_p;
}
/* This routine will relink all uses with the same stmt as HEAD into the list
   immediately following HEAD for iterator IMM.  */

static inline void
link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
{
  use_operand_p use_p;
  use_operand_p last_p = head;
  gimple head_stmt = USE_STMT (head);
  tree use = USE_FROM_PTR (head);
  ssa_op_iter op_iter;
  int flag;

  /* Only look at virtual or real uses, depending on the type of HEAD.  */
  flag = (is_gimple_reg (use) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

  if (gimple_code (head_stmt) == GIMPLE_PHI)
    {
      FOR_EACH_PHI_ARG (use_p, head_stmt, op_iter, flag)
        if (USE_FROM_PTR (use_p) == use)
          last_p = move_use_after_head (use_p, head, last_p);
    }
  else
    {
      FOR_EACH_SSA_USE_OPERAND (use_p, head_stmt, op_iter, flag)
        if (USE_FROM_PTR (use_p) == use)
          last_p = move_use_after_head (use_p, head, last_p);
    }
  /* Link iter node in after last_p.  */
  if (imm->iter_node.prev != NULL)
    delink_imm_use (&imm->iter_node);
  link_imm_use_to_list (&(imm->iter_node), last_p);
}

/* Initialize IMM to traverse over uses of VAR.  Return the first statement.  */
static inline gimple
first_imm_use_stmt (imm_use_iterator *imm, tree var)
{
  gcc_assert (TREE_CODE (var) == SSA_NAME);

  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
  imm->next_imm_name = NULL_USE_OPERAND_P;

  /* iter_node is used as a marker within the immediate use list to indicate
     where the end of the current stmt's uses are.  Initialize it to NULL
     stmt and use, which indicates a marker node.  */
  imm->iter_node.prev = NULL_USE_OPERAND_P;
  imm->iter_node.next = NULL_USE_OPERAND_P;
  imm->iter_node.loc.stmt = NULL;
  imm->iter_node.use = NULL_USE_OPERAND_P;

  if (end_imm_use_stmt_p (imm))
    return NULL;

  link_use_stmts_after (imm->imm_use, imm);

  return USE_STMT (imm->imm_use);
}
/* Bump IMM to the next stmt which has a use of var.  */

static inline gimple
next_imm_use_stmt (imm_use_iterator *imm)
{
  imm->imm_use = imm->iter_node.next;
  if (end_imm_use_stmt_p (imm))
    {
      if (imm->iter_node.prev != NULL)
        delink_imm_use (&imm->iter_node);
      return NULL;
    }

  link_use_stmts_after (imm->imm_use, imm);
  return USE_STMT (imm->imm_use);
}

/* This routine will return the first use on the stmt IMM currently refers
   to.  */

static inline use_operand_p
first_imm_use_on_stmt (imm_use_iterator *imm)
{
  imm->next_imm_name = imm->imm_use->next;
  return imm->imm_use;
}

/* Return TRUE if the last use on the stmt IMM refers to has been visited.  */

static inline bool
end_imm_use_on_stmt_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == &(imm->iter_node));
}

/* Bump to the next use on the stmt IMM refers to, return NULL if done.  */

static inline use_operand_p
next_imm_use_on_stmt (imm_use_iterator *imm)
{
  imm->imm_use = imm->next_imm_name;
  if (end_imm_use_on_stmt_p (imm))
    return NULL_USE_OPERAND_P;
  else
    {
      imm->next_imm_name = imm->imm_use->next;
      return imm->imm_use;
    }
}
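
/* Editor's note: an illustrative sketch, not part of the original header.
   The four routines above implement the statement-based immediate-use
   traversal (wrapped by the FOR_EACH_IMM_USE_STMT and
   FOR_EACH_IMM_USE_ON_STMT macros).  Rewriting every use of OLDNAME to
   NEWNAME follows roughly this shape:

     imm_use_iterator imm_iter;
     gimple stmt;
     use_operand_p use_p;

     for (stmt = first_imm_use_stmt (&imm_iter, oldname);
          !end_imm_use_stmt_p (&imm_iter);
          stmt = next_imm_use_stmt (&imm_iter))
       for (use_p = first_imm_use_on_stmt (&imm_iter);
            !end_imm_use_on_stmt_p (&imm_iter);
            use_p = next_imm_use_on_stmt (&imm_iter))
         SET_USE (use_p, newname);

   The iter_node placeholder inserted by link_use_stmts_after is what makes
   it safe to modify the use list during this traversal.  */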
/* Return true if VAR cannot be modified by the program.  */

static inline bool
unmodifiable_var_p (const_tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);

  if (MTAG_P (var))
    return false;

  return TREE_READONLY (var) && (TREE_STATIC (var) || DECL_EXTERNAL (var));
}

/* Return true if REF, an ARRAY_REF, has an INDIRECT_REF somewhere in it.  */

static inline bool
array_ref_contains_indirect_ref (const_tree ref)
{
  gcc_assert (TREE_CODE (ref) == ARRAY_REF);

  do
    {
      ref = TREE_OPERAND (ref, 0);
    } while (handled_component_p (ref));

  return TREE_CODE (ref) == INDIRECT_REF;
}

/* Return true if REF, a handled component reference, has an ARRAY_REF
   somewhere in it.  */

static inline bool
ref_contains_array_ref (const_tree ref)
{
  gcc_assert (handled_component_p (ref));

  do
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        return true;
      ref = TREE_OPERAND (ref, 0);
    } while (handled_component_p (ref));

  return false;
}
/* Return true if the two ranges [POS1, SIZE1] and [POS2, SIZE2]
   overlap.  SIZE1 and/or SIZE2 can be (unsigned)-1 in which case the
   range is open-ended.  Otherwise return false.  */

static inline bool
ranges_overlap_p (unsigned HOST_WIDE_INT pos1,
                  unsigned HOST_WIDE_INT size1,
                  unsigned HOST_WIDE_INT pos2,
                  unsigned HOST_WIDE_INT size2)
{
  if (pos1 >= pos2
      && (size2 == (unsigned HOST_WIDE_INT)-1
          || pos1 < (pos2 + size2)))
    return true;
  if (pos2 >= pos1
      && (size1 == (unsigned HOST_WIDE_INT)-1
          || pos2 < (pos1 + size1)))
    return true;

  return false;
}
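
/* Editor's note: a worked example, not part of the original header.  The
   pairs describe half-open byte ranges [POS, POS + SIZE).  For instance,
   ranges_overlap_p (4, 4, 6, 2) is true because [4,8) and [6,8) share
   bytes 6 and 7, whereas ranges_overlap_p (0, 4, 4, 2) is false because
   [0,4) and [4,6) merely touch at position 4.  */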
/* Return the memory tag associated with symbol SYM.  */

static inline tree
symbol_mem_tag (tree sym)
{
  tree tag = get_var_ann (sym)->symbol_mem_tag;

#if defined ENABLE_CHECKING
  if (tag)
    gcc_assert (TREE_CODE (tag) == SYMBOL_MEMORY_TAG);
#endif

  return tag;
}

/* Set the memory tag associated with symbol SYM.  */

static inline void
set_symbol_mem_tag (tree sym, tree tag)
{
#if defined ENABLE_CHECKING
  if (tag)
    gcc_assert (TREE_CODE (tag) == SYMBOL_MEMORY_TAG);
#endif

  get_var_ann (sym)->symbol_mem_tag = tag;
}

/* Accessor to tree-ssa-operands.c caches.  */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}

/* Map describing reference statistics for function FN.  */
static inline struct mem_ref_stats_d *
gimple_mem_ref_stats (const struct function *fn)
{
  return &fn->gimple_df->mem_ref_stats;
}
/* Given an edge_var_map V, return the PHI arg definition.  */

static inline tree
redirect_edge_var_map_def (edge_var_map *v)
{
  return v->def;
}

/* Given an edge_var_map V, return the PHI result.  */

static inline tree
redirect_edge_var_map_result (edge_var_map *v)
{
  return v->result;
}

/* Return an SSA_NAME node for variable VAR defined in statement STMT
   in function cfun.  */

static inline tree
make_ssa_name (tree var, gimple stmt)
{
  return make_ssa_name_fn (cfun, var, stmt);
}

#endif /* _TREE_FLOW_INLINE_H */