/* Inline functions for tree-flow.h
   Copyright (C) 2001, 2003, 2005, 2006, 2007, 2008, 2010
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef _TREE_FLOW_INLINE_H
#define _TREE_FLOW_INLINE_H 1

/* Inline functions for manipulating various data structures defined in
   tree-flow.h.  See tree-flow.h for documentation.  */
/* Return true when gimple SSA form was built.
   gimple_in_ssa_p is queried by the gimplifier in various early stages
   before the SSA infrastructure is initialized, so check that the data
   structures are present before relying on them.  */
static inline bool
gimple_in_ssa_p (const struct function *fun)
{
  return fun && fun->gimple_df && fun->gimple_df->in_ssa_p;
}

/* Array of all variables referenced in the function.  */
static inline htab_t
gimple_referenced_vars (const struct function *fun)
{
  if (!fun->gimple_df)
    return NULL;
  return fun->gimple_df->referenced_vars;
}

/* Artificial variable used for the virtual operand FUD chain.  */
static inline tree
gimple_vop (const struct function *fun)
{
  gcc_checking_assert (fun && fun->gimple_df);
  return fun->gimple_df->vop;
}
/* Initialize the hashtable iterator HTI to point to hashtable TABLE.  */

static inline void *
first_htab_element (htab_iterator *hti, htab_t table)
{
  hti->htab = table;
  hti->slot = table->entries;
  hti->limit = hti->slot + htab_size (table);
  do
    {
      PTR x = *(hti->slot);
      if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
	break;
    } while (++(hti->slot) < hti->limit);

  if (hti->slot < hti->limit)
    return *(hti->slot);
  return NULL;
}

/* Return true if we have reached the end of the hashtable pointed to
   by HTI.  */

static inline bool
end_htab_p (const htab_iterator *hti)
{
  if (hti->slot >= hti->limit)
    return true;
  return false;
}

/* Advance the hashtable iterator pointed to by HTI to the next element of the
   hashtable.  */

static inline void *
next_htab_element (htab_iterator *hti)
{
  while (++(hti->slot) < hti->limit)
    {
      PTR x = *(hti->slot);
      if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
	return x;
    }
  return NULL;
}
/* Initialize ITER to point to the first referenced variable in the
   referenced_vars hashtable, and return that variable.  */

static inline tree
first_referenced_var (referenced_var_iterator *iter)
{
  return (tree) first_htab_element (&iter->hti,
				    gimple_referenced_vars (cfun));
}

/* Return true if we have hit the end of the referenced variables ITER is
   iterating through.  */

static inline bool
end_referenced_vars_p (const referenced_var_iterator *iter)
{
  return end_htab_p (&iter->hti);
}

/* Make ITER point to the next referenced_var in the referenced_var hashtable,
   and return that variable.  */

static inline tree
next_referenced_var (referenced_var_iterator *iter)
{
  return (tree) next_htab_element (&iter->hti);
}
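/* A minimal usage sketch (not part of the original header): walking every
   referenced variable with the three iterator functions above, which is the
   pattern the FOR_EACH_REFERENCED_VAR macro in tree-flow.h expands to.
   Kept under "#if 0" because it is illustrative only.  */
#if 0
static inline unsigned int
example_count_referenced_vars (void)
{
  referenced_var_iterator rvi;
  tree var;
  unsigned int count = 0;

  for (var = first_referenced_var (&rvi);
       !end_referenced_vars_p (&rvi);
       var = next_referenced_var (&rvi))
    count++;

  return count;
}
#endif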
/* Return the variable annotation for T, which must be a _DECL node.
   Return NULL if the variable annotation doesn't already exist.  */
static inline var_ann_t
var_ann (const_tree t)
{
  const var_ann_t *p = DECL_VAR_ANN_PTR (t);
  return p ? *p : NULL;
}

/* Return the variable annotation for T, which must be a _DECL node.
   Create the variable annotation if it doesn't exist.  */
static inline var_ann_t
get_var_ann (tree var)
{
  var_ann_t *p = DECL_VAR_ANN_PTR (var);
  gcc_checking_assert (p);
  return *p ? *p : create_var_ann (var);
}
/* Get the number of the next statement uid to be allocated.  */
static inline unsigned int
gimple_stmt_max_uid (struct function *fn)
{
  return fn->last_stmt_uid;
}

/* Set the number of the next statement uid to be allocated.  */
static inline void
set_gimple_stmt_max_uid (struct function *fn, unsigned int maxid)
{
  fn->last_stmt_uid = maxid;
}

/* Allocate the next statement uid and return it.  */
static inline unsigned int
inc_gimple_stmt_max_uid (struct function *fn)
{
  return fn->last_stmt_uid++;
}
/* Return the line number for STMT, or -1 if we have no line number
   information for it.  */
static inline int
get_lineno (const_gimple stmt)
{
  location_t loc;

  if (!stmt)
    return -1;

  loc = gimple_location (stmt);
  if (loc == UNKNOWN_LOCATION)
    return -1;

  return LOCATION_LINE (loc);
}
/* Delink an immediate_uses node from its chain.  */
static inline void
delink_imm_use (ssa_use_operand_t *linknode)
{
  /* Return if this node is not in a list.  */
  if (linknode->prev == NULL)
    return;

  linknode->prev->next = linknode->next;
  linknode->next->prev = linknode->prev;
  linknode->prev = NULL;
  linknode->next = NULL;
}

/* Link ssa_imm_use node LINKNODE into the chain for LIST.  */
static inline void
link_imm_use_to_list (ssa_use_operand_t *linknode, ssa_use_operand_t *list)
{
  /* Link the new node at the head of the list.  If we are in the process of
     traversing the list, we won't visit any new nodes added to it.  */
  linknode->prev = list;
  linknode->next = list->next;
  list->next->prev = linknode;
  list->next = linknode;
}
/* Link ssa_imm_use node LINKNODE into the chain for DEF.  */
static inline void
link_imm_use (ssa_use_operand_t *linknode, tree def)
{
  ssa_use_operand_t *root;

  if (!def || TREE_CODE (def) != SSA_NAME)
    linknode->prev = NULL;
  else
    {
      root = &(SSA_NAME_IMM_USE_NODE (def));
#ifdef ENABLE_CHECKING
      if (linknode->use)
	gcc_checking_assert (*(linknode->use) == def);
#endif
      link_imm_use_to_list (linknode, root);
    }
}

/* Set the value of a use pointed to by USE to VAL.  */
static inline void
set_ssa_use_from_ptr (use_operand_p use, tree val)
{
  delink_imm_use (use);
  *(use->use) = val;
  link_imm_use (use, val);
}

/* Link ssa_imm_use node LINKNODE into the chain for DEF, with use occurring
   in STMT.  */
static inline void
link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, gimple stmt)
{
  if (stmt)
    link_imm_use (linknode, def);
  else
    link_imm_use (linknode, NULL);
  linknode->loc.stmt = stmt;
}
/* Relink a new node in place of an old node in the list.  */
static inline void
relink_imm_use (ssa_use_operand_t *node, ssa_use_operand_t *old)
{
  /* The node one had better be in the same list.  */
  gcc_checking_assert (*(old->use) == *(node->use));
  node->prev = old->prev;
  node->next = old->next;
  if (old->prev)
    {
      old->prev->next = node;
      old->next->prev = node;
      /* Remove the old node from the list.  */
      old->prev = NULL;
    }
}

/* Relink ssa_imm_use node LINKNODE into the chain for OLD, with use occurring
   in STMT.  */
static inline void
relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old,
		     gimple stmt)
{
  if (stmt)
    relink_imm_use (linknode, old);
  else
    link_imm_use (linknode, NULL);
  linknode->loc.stmt = stmt;
}
/* Return true if IMM has reached the end of the immediate use list.  */
static inline bool
end_readonly_imm_use_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == imm->end_p);
}

/* Initialize iterator IMM to process the list for VAR.  */
static inline use_operand_p
first_readonly_imm_use (imm_use_iterator *imm, tree var)
{
  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
#ifdef ENABLE_CHECKING
  imm->iter_node.next = imm->imm_use->next;
#endif
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}

/* Bump IMM to the next use in the list.  */
static inline use_operand_p
next_readonly_imm_use (imm_use_iterator *imm)
{
  use_operand_p old = imm->imm_use;

#ifdef ENABLE_CHECKING
  /* If this assertion fails, it indicates the 'next' pointer has changed
     since the last bump.  This indicates that the list is being modified
     via stmt changes, or SET_USE, or somesuch thing, and you need to be
     using the SAFE version of the iterator.  */
  gcc_assert (imm->iter_node.next == old->next);
  imm->iter_node.next = old->next->next;
#endif

  imm->imm_use = old->next;
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}
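/* A minimal usage sketch (not part of the original header): the read-only
   immediate-use walk above is the pattern behind FOR_EACH_IMM_USE_FAST in
   tree-flow.h.  The use list must not be modified while iterating this way.
   Illustrative only, hence "#if 0".  */
#if 0
static inline unsigned int
example_count_nondebug_uses_fast (tree ssa_name)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  unsigned int count = 0;

  for (use_p = first_readonly_imm_use (&iter, ssa_name);
       !end_readonly_imm_use_p (&iter);
       use_p = next_readonly_imm_use (&iter))
    if (!is_gimple_debug (USE_STMT (use_p)))
      count++;

  return count;
}
#endif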
extern bool has_zero_uses_1 (const ssa_use_operand_t *head);
extern bool single_imm_use_1 (const ssa_use_operand_t *head,
			      use_operand_p *use_p, gimple *stmt);
/* Return true if VAR has no nondebug uses.  */
static inline bool
has_zero_uses (const_tree var)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));

  /* A single use_operand means there are no items in the list.  */
  if (ptr == ptr->next)
    return true;

  /* If there are debug stmts, we have to look at each use and see
     whether there are any nondebug uses.  */
  if (!MAY_HAVE_DEBUG_STMTS)
    return false;

  return has_zero_uses_1 (ptr);
}

/* Return true if VAR has a single nondebug use.  */
static inline bool
has_single_use (const_tree var)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));

  /* If there aren't any uses whatsoever, we're done.  */
  if (ptr == ptr->next)
    return false;

  /* If there's a single use, check that it's not a debug stmt.  */
  if (ptr == ptr->next->next)
    return !is_gimple_debug (USE_STMT (ptr->next));

  /* If there are debug stmts, we have to look at each of them.  */
  if (!MAY_HAVE_DEBUG_STMTS)
    return false;

  return single_imm_use_1 (ptr, NULL, NULL);
}
/* If VAR has only a single immediate nondebug use, return true, and
   set USE_P and STMT to the use pointer and stmt of occurrence.  */
static inline bool
single_imm_use (const_tree var, use_operand_p *use_p, gimple *stmt)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));

  /* If there aren't any uses whatsoever, we're done.  */
  if (ptr == ptr->next)
    {
    return_false:
      *use_p = NULL_USE_OPERAND_P;
      *stmt = NULL;
      return false;
    }

  /* If there's a single use, check that it's not a debug stmt.  */
  if (ptr == ptr->next->next)
    {
      if (!is_gimple_debug (USE_STMT (ptr->next)))
	{
	  *use_p = ptr->next;
	  *stmt = ptr->next->loc.stmt;
	  return true;
	}
      else
	goto return_false;
    }

  /* If there are debug stmts, we have to look at each of them.  */
  if (!MAY_HAVE_DEBUG_STMTS)
    goto return_false;

  return single_imm_use_1 (ptr, use_p, stmt);
}
/* Return the number of nondebug immediate uses of VAR.  */
static inline unsigned int
num_imm_uses (const_tree var)
{
  const ssa_use_operand_t *const start = &(SSA_NAME_IMM_USE_NODE (var));
  const ssa_use_operand_t *ptr;
  unsigned int num = 0;

  if (!MAY_HAVE_DEBUG_STMTS)
    for (ptr = start->next; ptr != start; ptr = ptr->next)
      num++;
  else
    for (ptr = start->next; ptr != start; ptr = ptr->next)
      if (!is_gimple_debug (USE_STMT (ptr)))
	num++;

  return num;
}
/* Return the tree pointed-to by USE.  */
static inline tree
get_use_from_ptr (use_operand_p use)
{
  return *(use->use);
}

/* Return the tree pointed-to by DEF.  */
static inline tree
get_def_from_ptr (def_operand_p def)
{
  return *def;
}

/* Return a use_operand_p pointer for argument I of PHI node GS.  */

static inline use_operand_p
gimple_phi_arg_imm_use_ptr (gimple gs, int i)
{
  return &gimple_phi_arg (gs, i)->imm_use;
}
/* Return the tree operand for argument I of PHI node GS.  */

static inline tree
gimple_phi_arg_def (gimple gs, size_t index)
{
  struct phi_arg_d *pd = gimple_phi_arg (gs, index);
  return get_use_from_ptr (&pd->imm_use);
}

/* Return a pointer to the tree operand for argument I of PHI node GS.  */

static inline tree *
gimple_phi_arg_def_ptr (gimple gs, size_t index)
{
  return &gimple_phi_arg (gs, index)->def;
}

/* Return the edge associated with argument I of phi node GS.  */

static inline edge
gimple_phi_arg_edge (gimple gs, size_t i)
{
  return EDGE_PRED (gimple_bb (gs), i);
}

/* Return the source location of gimple argument I of phi node GS.  */

static inline source_location
gimple_phi_arg_location (gimple gs, size_t i)
{
  return gimple_phi_arg (gs, i)->locus;
}

/* Return the source location of the argument on edge E of phi node GS.  */

static inline source_location
gimple_phi_arg_location_from_edge (gimple gs, edge e)
{
  return gimple_phi_arg (gs, e->dest_idx)->locus;
}

/* Set the source location of gimple argument I of phi node GS to LOC.  */

static inline void
gimple_phi_arg_set_location (gimple gs, size_t i, source_location loc)
{
  gimple_phi_arg (gs, i)->locus = loc;
}

/* Return TRUE if argument I of phi node GS has a location record.  */

static inline bool
gimple_phi_arg_has_location (gimple gs, size_t i)
{
  return gimple_phi_arg_location (gs, i) != UNKNOWN_LOCATION;
}
/* Return the PHI nodes for basic block BB, or NULL if there are no
   PHI nodes.  */
static inline gimple_seq
phi_nodes (const_basic_block bb)
{
  gcc_checking_assert (!(bb->flags & BB_RTL));
  if (!bb->il.gimple)
    return NULL;
  return bb->il.gimple->phi_nodes;
}

/* Set PHI nodes of a basic block BB to SEQ.  */

static inline void
set_phi_nodes (basic_block bb, gimple_seq seq)
{
  gimple_stmt_iterator i;

  gcc_checking_assert (!(bb->flags & BB_RTL));
  bb->il.gimple->phi_nodes = seq;
  if (seq)
    for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
      gimple_set_bb (gsi_stmt (i), bb);
}
/* Return the index of the PHI argument which contains the specified use.  */

static inline size_t
phi_arg_index_from_use (use_operand_p use)
{
  struct phi_arg_d *element, *root;
  size_t index;
  gimple phi;

  /* Since the use is the first thing in a PHI argument element, we can
     calculate its index based on casting it to an argument, and performing
     pointer arithmetic.  */

  phi = USE_STMT (use);

  element = (struct phi_arg_d *)use;
  root = gimple_phi_arg (phi, 0);
  index = element - root;

#ifdef ENABLE_CHECKING
  /* Make sure the calculation doesn't have any leftover bytes.  If it does,
     then imm_use is likely not the first element in phi_arg_d.  */
  gcc_assert ((((char *)element - (char *)root)
	       % sizeof (struct phi_arg_d)) == 0
	      && index < gimple_phi_capacity (phi));
#endif

  return index;
}
/* Mark VAR as used, so that it'll be preserved during rtl expansion.  */

static inline void
set_is_used (tree var)
{
  var_ann_t ann = get_var_ann (var);
  ann->used = true;
}


/* Return true if T (assumed to be a DECL) is a global variable.
   A variable is considered global if its storage is not automatic.  */

static inline bool
is_global_var (const_tree t)
{
  return (TREE_STATIC (t) || DECL_EXTERNAL (t));
}


/* Return true if VAR may be aliased.  A variable is considered as
   maybe aliased if it has its address taken by the local TU
   or possibly by another TU and might be modified through a pointer.  */

static inline bool
may_be_aliased (const_tree var)
{
  return (TREE_CODE (var) != CONST_DECL
	  && !((TREE_STATIC (var) || TREE_PUBLIC (var) || DECL_EXTERNAL (var))
	       && TREE_READONLY (var)
	       && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (var)))
	  && (TREE_PUBLIC (var)
	      || DECL_EXTERNAL (var)
	      || TREE_ADDRESSABLE (var)));
}


/* PHI nodes should contain only ssa_names and invariants.  A test
   for ssa_name is definitely simpler; don't let invalid contents
   slip in in the meantime.  */

static inline bool
phi_ssa_name_p (const_tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return true;
#ifdef ENABLE_CHECKING
  gcc_assert (is_gimple_min_invariant (t));
#endif
  return false;
}
/* Return the loop containing the statement STMT.  */

static inline struct loop *
loop_containing_stmt (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  if (!bb)
    return NULL;

  return bb->loop_father;
}
/* ----------------------------------------------------------------------- */

/* The following set of routines are used to iterate over various types of
   SSA operands.  */

/* Return true if PTR is finished iterating.  */
static inline bool
op_iter_done (const ssa_op_iter *ptr)
{
  return ptr->done;
}
/* Get the next iterator use value for PTR.  */
static inline use_operand_p
op_iter_next_use (ssa_op_iter *ptr)
{
  use_operand_p use_p;
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_use);
  if (ptr->uses)
    {
      use_p = USE_OP_PTR (ptr->uses);
      ptr->uses = ptr->uses->next;
      return use_p;
    }
  if (ptr->phi_i < ptr->num_phi)
    return PHI_ARG_DEF_PTR (ptr->phi_stmt, (ptr->phi_i)++);
  ptr->done = true;
  return NULL_USE_OPERAND_P;
}

/* Get the next iterator def value for PTR.  */
static inline def_operand_p
op_iter_next_def (ssa_op_iter *ptr)
{
  def_operand_p def_p;
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_def);
  if (ptr->defs)
    {
      def_p = DEF_OP_PTR (ptr->defs);
      ptr->defs = ptr->defs->next;
      return def_p;
    }
  ptr->done = true;
  return NULL_DEF_OPERAND_P;
}

/* Get the next iterator tree value for PTR.  */
static inline tree
op_iter_next_tree (ssa_op_iter *ptr)
{
  tree val;
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_tree);
  if (ptr->uses)
    {
      val = USE_OP (ptr->uses);
      ptr->uses = ptr->uses->next;
      return val;
    }
  if (ptr->defs)
    {
      val = DEF_OP (ptr->defs);
      ptr->defs = ptr->defs->next;
      return val;
    }

  ptr->done = true;
  return NULL_TREE;
}
/* Clear the iterator PTR and mark it done.  This is normally used to avoid
   warnings in the compiler about possibly uninitialized components.  */

static inline void
clear_and_done_ssa_iter (ssa_op_iter *ptr)
{
  ptr->defs = NULL;
  ptr->uses = NULL;
  ptr->iter_type = ssa_op_iter_none;
  ptr->phi_i = 0;
  ptr->num_phi = 0;
  ptr->phi_stmt = NULL;
  ptr->done = true;
}
/* Initialize the iterator PTR to the operands in STMT requested by FLAGS.  */
static inline void
op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
{
  /* We do not support iterating over virtual defs or uses without
     iterating over defs or uses at the same time.  */
  gcc_checking_assert ((!(flags & SSA_OP_VDEF) || (flags & SSA_OP_DEF))
		       && (!(flags & SSA_OP_VUSE) || (flags & SSA_OP_USE)));
  ptr->defs = (flags & (SSA_OP_DEF|SSA_OP_VDEF)) ? gimple_def_ops (stmt) : NULL;
  if (!(flags & SSA_OP_VDEF)
      && ptr->defs
      && gimple_vdef (stmt) != NULL_TREE)
    ptr->defs = ptr->defs->next;
  ptr->uses = (flags & (SSA_OP_USE|SSA_OP_VUSE)) ? gimple_use_ops (stmt) : NULL;
  if (!(flags & SSA_OP_VUSE)
      && ptr->uses
      && gimple_vuse (stmt) != NULL_TREE)
    ptr->uses = ptr->uses->next;
  ptr->done = false;

  ptr->phi_i = 0;
  ptr->num_phi = 0;
  ptr->phi_stmt = NULL;
}
/* Initialize iterator PTR to the use operands in STMT based on FLAGS.  Return
   the first use.  */
static inline use_operand_p
op_iter_init_use (ssa_op_iter *ptr, gimple stmt, int flags)
{
  gcc_checking_assert ((flags & SSA_OP_ALL_DEFS) == 0
		       && (flags & SSA_OP_USE));
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_use;
  return op_iter_next_use (ptr);
}

/* Initialize iterator PTR to the def operands in STMT based on FLAGS.  Return
   the first def.  */
static inline def_operand_p
op_iter_init_def (ssa_op_iter *ptr, gimple stmt, int flags)
{
  gcc_checking_assert ((flags & SSA_OP_ALL_USES) == 0
		       && (flags & SSA_OP_DEF));
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_def;
  return op_iter_next_def (ptr);
}

/* Initialize iterator PTR to the operands in STMT based on FLAGS.  Return
   the first operand as a tree.  */
static inline tree
op_iter_init_tree (ssa_op_iter *ptr, gimple stmt, int flags)
{
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_tree;
  return op_iter_next_tree (ptr);
}
/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   return NULL_TREE.  */
static inline tree
single_ssa_tree_operand (gimple stmt, int flags)
{
  tree var;
  ssa_op_iter iter;

  var = op_iter_init_tree (&iter, stmt, flags);
  if (op_iter_done (&iter))
    return NULL_TREE;
  op_iter_next_tree (&iter);
  if (op_iter_done (&iter))
    return var;
  return NULL_TREE;
}


/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   return NULL_USE_OPERAND_P.  */
static inline use_operand_p
single_ssa_use_operand (gimple stmt, int flags)
{
  use_operand_p var;
  ssa_op_iter iter;

  var = op_iter_init_use (&iter, stmt, flags);
  if (op_iter_done (&iter))
    return NULL_USE_OPERAND_P;
  op_iter_next_use (&iter);
  if (op_iter_done (&iter))
    return var;
  return NULL_USE_OPERAND_P;
}


/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   return NULL_DEF_OPERAND_P.  */
static inline def_operand_p
single_ssa_def_operand (gimple stmt, int flags)
{
  def_operand_p var;
  ssa_op_iter iter;

  var = op_iter_init_def (&iter, stmt, flags);
  if (op_iter_done (&iter))
    return NULL_DEF_OPERAND_P;
  op_iter_next_def (&iter);
  if (op_iter_done (&iter))
    return var;
  return NULL_DEF_OPERAND_P;
}
/* Return true if there are zero operands in STMT matching the type
   given in FLAGS.  */
static inline bool
zero_ssa_operands (gimple stmt, int flags)
{
  ssa_op_iter iter;

  op_iter_init_tree (&iter, stmt, flags);
  return op_iter_done (&iter);
}


/* Return the number of operands matching FLAGS in STMT.  */
static inline int
num_ssa_operands (gimple stmt, int flags)
{
  ssa_op_iter iter;
  tree t;
  int num = 0;

  FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, flags)
    num++;
  return num;
}
/* Delink all immediate_use information for STMT.  */
static inline void
delink_stmt_imm_use (gimple stmt)
{
  ssa_op_iter iter;
  use_operand_p use_p;

  if (ssa_operands_active ())
    FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
      delink_imm_use (use_p);
}
/* If there is a single DEF in the PHI node which matches FLAGS, return it.
   Otherwise return NULL_TREE.  */
static inline tree
single_phi_def (gimple stmt, int flags)
{
  tree def = PHI_RESULT (stmt);
  if ((flags & SSA_OP_DEF) && is_gimple_reg (def))
    return def;
  if ((flags & SSA_OP_VIRTUAL_DEFS) && !is_gimple_reg (def))
    return def;
  return NULL_TREE;
}
/* Initialize the iterator PTR for uses matching FLAGS in PHI.  FLAGS should
   be either SSA_OP_USES or SSA_OP_VIRTUAL_USES.  */
static inline use_operand_p
op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags)
{
  tree phi_def = gimple_phi_result (phi);
  int comp;

  clear_and_done_ssa_iter (ptr);
  ptr->done = false;

  gcc_checking_assert ((flags & (SSA_OP_USE | SSA_OP_VIRTUAL_USES)) != 0);

  comp = (is_gimple_reg (phi_def) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

  /* If the PHI node doesn't have the operand type we care about,
     we're done.  */
  if ((flags & comp) == 0)
    {
      ptr->done = true;
      return NULL_USE_OPERAND_P;
    }

  ptr->phi_stmt = phi;
  ptr->num_phi = gimple_phi_num_args (phi);
  ptr->iter_type = ssa_op_iter_use;
  return op_iter_next_use (ptr);
}
/* Start an iterator for a PHI definition.  */

static inline def_operand_p
op_iter_init_phidef (ssa_op_iter *ptr, gimple phi, int flags)
{
  tree phi_def = PHI_RESULT (phi);
  int comp;

  clear_and_done_ssa_iter (ptr);
  ptr->done = false;

  gcc_checking_assert ((flags & (SSA_OP_DEF | SSA_OP_VIRTUAL_DEFS)) != 0);

  comp = (is_gimple_reg (phi_def) ? SSA_OP_DEF : SSA_OP_VIRTUAL_DEFS);

  /* If the PHI node doesn't have the operand type we care about,
     we're done.  */
  if ((flags & comp) == 0)
    {
      ptr->done = true;
      return NULL_DEF_OPERAND_P;
    }

  ptr->iter_type = ssa_op_iter_def;
  /* The first call to op_iter_next_def will terminate the iterator since
     all the fields are NULL.  Simply return the result here as the first and
     therefore only result.  */
  return PHI_RESULT_PTR (phi);
}
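/* A minimal usage sketch (not part of the original header): walking the
   argument uses of a PHI node with the PHI-specific initializer above,
   which is the pattern FOR_EACH_PHI_ARG expands to.  Illustrative only,
   hence "#if 0".  */
#if 0
static inline unsigned int
example_count_phi_arg_uses_of (gimple phi, tree name)
{
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned int count = 0;

  for (use_p = op_iter_init_phiuse (&iter, phi, SSA_OP_USE);
       !op_iter_done (&iter);
       use_p = op_iter_next_use (&iter))
    if (USE_FROM_PTR (use_p) == name)
      count++;

  return count;
}
#endif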
/* Return true if IMM has reached the end of the immediate use stmt list.  */

static inline bool
end_imm_use_stmt_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == imm->end_p);
}

/* Finish the traversal of an immediate use stmt list IMM by removing the
   placeholder node from the list.  */

static inline void
end_imm_use_stmt_traverse (imm_use_iterator *imm)
{
  delink_imm_use (&(imm->iter_node));
}
/* Immediate use traversal of uses within a stmt requires that all the
   uses on a stmt be sequentially listed.  This routine is used to build up
   this sequential list by adding USE_P to the end of the current list
   currently delimited by HEAD and LAST_P.  The new LAST_P value is
   returned.  */

static inline use_operand_p
move_use_after_head (use_operand_p use_p, use_operand_p head,
		     use_operand_p last_p)
{
#ifdef ENABLE_CHECKING
  gcc_assert (USE_FROM_PTR (use_p) == USE_FROM_PTR (head));
#endif
  /* Skip head when we find it.  */
  if (use_p != head)
    {
      /* If use_p is already linked in after last_p, continue.  */
      if (last_p->next == use_p)
	last_p = use_p;
      else
	{
	  /* Delink from current location, and link in at last_p.  */
	  delink_imm_use (use_p);
	  link_imm_use_to_list (use_p, last_p);
	  last_p = use_p;
	}
    }
  return last_p;
}
/* This routine will relink all uses with the same stmt as HEAD into the list
   immediately following HEAD for iterator IMM.  */

static inline void
link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
{
  use_operand_p use_p;
  use_operand_p last_p = head;
  gimple head_stmt = USE_STMT (head);
  tree use = USE_FROM_PTR (head);
  ssa_op_iter op_iter;
  int flag;

  /* Only look at virtual or real uses, depending on the type of HEAD.  */
  flag = (is_gimple_reg (use) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

  if (gimple_code (head_stmt) == GIMPLE_PHI)
    {
      FOR_EACH_PHI_ARG (use_p, head_stmt, op_iter, flag)
	if (USE_FROM_PTR (use_p) == use)
	  last_p = move_use_after_head (use_p, head, last_p);
    }
  else
    {
      if (flag == SSA_OP_USE)
	{
	  FOR_EACH_SSA_USE_OPERAND (use_p, head_stmt, op_iter, flag)
	    if (USE_FROM_PTR (use_p) == use)
	      last_p = move_use_after_head (use_p, head, last_p);
	}
      else if ((use_p = gimple_vuse_op (head_stmt)) != NULL_USE_OPERAND_P)
	{
	  if (USE_FROM_PTR (use_p) == use)
	    last_p = move_use_after_head (use_p, head, last_p);
	}
    }
  /* Link iter node in after last_p.  */
  if (imm->iter_node.prev != NULL)
    delink_imm_use (&imm->iter_node);
  link_imm_use_to_list (&(imm->iter_node), last_p);
}
/* Initialize IMM to traverse over uses of VAR.  Return the first statement.  */
static inline gimple
first_imm_use_stmt (imm_use_iterator *imm, tree var)
{
  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
  imm->next_imm_name = NULL_USE_OPERAND_P;

  /* iter_node is used as a marker within the immediate use list to indicate
     where the end of the current stmt's uses are.  Initialize it to NULL
     stmt and use, which indicates a marker node.  */
  imm->iter_node.prev = NULL_USE_OPERAND_P;
  imm->iter_node.next = NULL_USE_OPERAND_P;
  imm->iter_node.loc.stmt = NULL;
  imm->iter_node.use = NULL;

  if (end_imm_use_stmt_p (imm))
    return NULL;

  link_use_stmts_after (imm->imm_use, imm);

  return USE_STMT (imm->imm_use);
}
/* Bump IMM to the next stmt which has a use of VAR.  */

static inline gimple
next_imm_use_stmt (imm_use_iterator *imm)
{
  imm->imm_use = imm->iter_node.next;
  if (end_imm_use_stmt_p (imm))
    {
      if (imm->iter_node.prev != NULL)
	delink_imm_use (&imm->iter_node);
      return NULL;
    }

  link_use_stmts_after (imm->imm_use, imm);
  return USE_STMT (imm->imm_use);
}
/* This routine will return the first use on the stmt IMM currently refers
   to.  */

static inline use_operand_p
first_imm_use_on_stmt (imm_use_iterator *imm)
{
  imm->next_imm_name = imm->imm_use->next;
  return imm->imm_use;
}

/* Return TRUE if the last use on the stmt IMM refers to has been visited.  */

static inline bool
end_imm_use_on_stmt_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == &(imm->iter_node));
}

/* Bump to the next use on the stmt IMM refers to, return NULL if done.  */

static inline use_operand_p
next_imm_use_on_stmt (imm_use_iterator *imm)
{
  imm->imm_use = imm->next_imm_name;
  if (end_imm_use_on_stmt_p (imm))
    return NULL_USE_OPERAND_P;
  else
    {
      imm->next_imm_name = imm->imm_use->next;
      return imm->imm_use;
    }
}
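/* A minimal usage sketch (not part of the original header): the two-level
   traversal built from the functions above visits each statement that uses
   NAME once, then every use operand within that statement.  This is the
   pattern behind FOR_EACH_IMM_USE_STMT / FOR_EACH_IMM_USE_ON_STMT; unlike
   those macros, it omits the BREAK_FROM_IMM_USE_STMT cleanup that is needed
   when leaving the outer loop early.  Illustrative only, hence "#if 0".  */
#if 0
static inline void
example_replace_all_uses (tree name, tree replacement)
{
  imm_use_iterator iter;
  gimple stmt;
  use_operand_p use_p;

  for (stmt = first_imm_use_stmt (&iter, name);
       !end_imm_use_stmt_p (&iter);
       stmt = next_imm_use_stmt (&iter))
    for (use_p = first_imm_use_on_stmt (&iter);
	 !end_imm_use_on_stmt_p (&iter);
	 use_p = next_imm_use_on_stmt (&iter))
      SET_USE (use_p, replacement);
}
#endif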
/* Return true if VAR cannot be modified by the program.  */

static inline bool
unmodifiable_var_p (const_tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);

  return TREE_READONLY (var) && (TREE_STATIC (var) || DECL_EXTERNAL (var));
}
/* Return true if REF, an ARRAY_REF, has an INDIRECT_REF somewhere in it.  */

static inline bool
array_ref_contains_indirect_ref (const_tree ref)
{
  gcc_checking_assert (TREE_CODE (ref) == ARRAY_REF);

  do
    {
      ref = TREE_OPERAND (ref, 0);
    } while (handled_component_p (ref));

  return TREE_CODE (ref) == INDIRECT_REF;
}

/* Return true if REF, a handled component reference, has an ARRAY_REF
   somewhere in it.  */

static inline bool
ref_contains_array_ref (const_tree ref)
{
  gcc_checking_assert (handled_component_p (ref));

  do
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	return true;
      ref = TREE_OPERAND (ref, 0);
    } while (handled_component_p (ref));

  return false;
}

/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Return true if the two ranges [POS1, POS1 + SIZE1) and [POS2, POS2 + SIZE2)
   overlap.  SIZE1 and/or SIZE2 can be (unsigned)-1 in which case that
   range is open-ended.  Otherwise return false.  */

static inline bool
ranges_overlap_p (unsigned HOST_WIDE_INT pos1,
		  unsigned HOST_WIDE_INT size1,
		  unsigned HOST_WIDE_INT pos2,
		  unsigned HOST_WIDE_INT size2)
{
  if (pos1 >= pos2
      && (size2 == (unsigned HOST_WIDE_INT)-1
	  || pos1 < (pos2 + size2)))
    return true;
  if (pos2 >= pos1
      && (size1 == (unsigned HOST_WIDE_INT)-1
	  || pos2 < (pos1 + size1)))
    return true;

  return false;
}
/* Accessor to tree-ssa-operands.c caches.  */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}

/* Given an edge_var_map V, return the PHI arg definition.  */

static inline tree
redirect_edge_var_map_def (edge_var_map *v)
{
  return v->def;
}

/* Given an edge_var_map V, return the PHI result.  */

static inline tree
redirect_edge_var_map_result (edge_var_map *v)
{
  return v->result;
}

/* Given an edge_var_map V, return the PHI arg location.  */

static inline source_location
redirect_edge_var_map_location (edge_var_map *v)
{
  return v->locus;
}
/* Return an SSA_NAME node for variable VAR defined in statement STMT
   in function cfun.  */

static inline tree
make_ssa_name (tree var, gimple stmt)
{
  return make_ssa_name_fn (cfun, var, stmt);
}

#endif /* _TREE_FLOW_INLINE_H */