/* Inline functions for tree-flow.h
   Copyright (C) 2001, 2003, 2005, 2006, 2007, 2008, 2010
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef _TREE_FLOW_INLINE_H
#define _TREE_FLOW_INLINE_H 1

/* Inline functions for manipulating various data structures defined in
   tree-flow.h.  See tree-flow.h for documentation.  */

/* Return true when gimple SSA form was built.
   gimple_in_ssa_p is queried by the gimplifier in various early stages
   before the SSA infrastructure is initialized, so check that the data
   structures are present before dereferencing them.  */
static inline bool
gimple_in_ssa_p (const struct function *fun)
{
  return fun && fun->gimple_df && fun->gimple_df->in_ssa_p;
}

/* Artificial variable used for the virtual operand FUD chain.  */
static inline tree
gimple_vop (const struct function *fun)
{
  gcc_checking_assert (fun && fun->gimple_df);
  return fun->gimple_df->vop;
}

/* Initialize the hashtable iterator HTI to point to hashtable TABLE.  */

static inline void *
first_htab_element (htab_iterator *hti, htab_t table)
{
  hti->htab = table;
  hti->slot = table->entries;
  hti->limit = hti->slot + htab_size (table);
  do
    {
      PTR x = *(hti->slot);
      if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
        break;
    } while (++(hti->slot) < hti->limit);

  if (hti->slot < hti->limit)
    return *(hti->slot);
  return NULL;
}

/* Return true if HTI has reached the end of the hashtable it iterates
   over; empty and deleted slots were already skipped by the two routines
   around this one.  */

static inline bool
end_htab_p (const htab_iterator *hti)
{
  if (hti->slot >= hti->limit)
    return true;
  return false;
}

/* Advance the hashtable iterator pointed to by HTI to the next element of the
   hashtable, returning it, or NULL once the table is exhausted.  */

static inline void *
next_htab_element (htab_iterator *hti)
{
  while (++(hti->slot) < hti->limit)
    {
      PTR x = *(hti->slot);
      if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
        return x;
    }
  return NULL;
}

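/* A typical walk over a hashtable drives the three routines above through
   the FOR_EACH_HTAB_ELEMENT macro from tree-flow.h.  Illustrative sketch
   only; `my_htab' and `process' stand in for caller code:

     htab_iterator hti;
     tree var;

     FOR_EACH_HTAB_ELEMENT (my_htab, var, tree, hti)
       process (var);
*/
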
/* Get the number of the next statement uid to be allocated.  */
static inline unsigned int
gimple_stmt_max_uid (struct function *fn)
{
  return fn->last_stmt_uid;
}

/* Set the number of the next statement uid to be allocated.  */
static inline void
set_gimple_stmt_max_uid (struct function *fn, unsigned int maxid)
{
  fn->last_stmt_uid = maxid;
}

/* Allocate the next statement uid: return it and advance the counter.  */
static inline unsigned int
inc_gimple_stmt_max_uid (struct function *fn)
{
  return fn->last_stmt_uid++;
}

/* Return the line number for STMT, or return -1 if we have no line
   number information for it.  */
static inline int
get_lineno (const_gimple stmt)
{
  location_t loc;

  if (!stmt)
    return -1;

  loc = gimple_location (stmt);
  if (loc == UNKNOWN_LOCATION)
    return -1;

  return LOCATION_LINE (loc);
}

/* Delink an immediate_uses node from its chain.  */
static inline void
delink_imm_use (ssa_use_operand_t *linknode)
{
  /* Return if this node is not in a list.  */
  if (linknode->prev == NULL)
    return;

  linknode->prev->next = linknode->next;
  linknode->next->prev = linknode->prev;
  linknode->prev = NULL;
  linknode->next = NULL;
}

/* Link ssa_imm_use node LINKNODE into the chain for LIST.  */
static inline void
link_imm_use_to_list (ssa_use_operand_t *linknode, ssa_use_operand_t *list)
{
  /* Link the new node at the head of the list.  If we are in the process of
     traversing the list, we won't visit any new nodes added to it.  */
  linknode->prev = list;
  linknode->next = list->next;
  list->next->prev = linknode;
  list->next = linknode;
}

/* Link ssa_imm_use node LINKNODE into the chain for DEF.  */
static inline void
link_imm_use (ssa_use_operand_t *linknode, tree def)
{
  ssa_use_operand_t *root;

  if (!def || TREE_CODE (def) != SSA_NAME)
    linknode->prev = NULL;
  else
    {
      root = &(SSA_NAME_IMM_USE_NODE (def));
      /* The use pointer must already refer to DEF.  */
      gcc_checking_assert (*(linknode->use) == def);
      link_imm_use_to_list (linknode, root);
    }
}

/* Set the value of a use pointed to by USE to VAL.  */
static inline void
set_ssa_use_from_ptr (use_operand_p use, tree val)
{
  delink_imm_use (use);
  *(use->use) = val;
  link_imm_use (use, val);
}

/* Link ssa_imm_use node LINKNODE into the chain for DEF, with use occurring
   in STMT.  */
static inline void
link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, gimple stmt)
{
  if (stmt)
    link_imm_use (linknode, def);
  else
    link_imm_use (linknode, NULL);
  linknode->loc.stmt = stmt;
}

/* Relink a new node in place of an old node in the list.  */
static inline void
relink_imm_use (ssa_use_operand_t *node, ssa_use_operand_t *old)
{
  /* The new node had better be in the same list as the old one.  */
  gcc_checking_assert (*(old->use) == *(node->use));
  node->prev = old->prev;
  node->next = old->next;
  if (old->prev)
    {
      old->prev->next = node;
      old->next->prev = node;
      /* Remove the old node from the list.  */
      old->prev = NULL;
    }
}

/* Relink ssa_imm_use node LINKNODE into the chain for OLD, with use occurring
   in STMT.  */
static inline void
relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old,
                     gimple stmt)
{
  if (stmt)
    relink_imm_use (linknode, old);
  else
    link_imm_use (linknode, NULL);
  linknode->loc.stmt = stmt;
}

/* Return true if IMM has reached the end of the immediate use list.  */
static inline bool
end_readonly_imm_use_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == imm->end_p);
}

/* Initialize iterator IMM to process the list for VAR.  */
static inline use_operand_p
first_readonly_imm_use (imm_use_iterator *imm, tree var)
{
  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
#ifdef ENABLE_CHECKING
  imm->iter_node.next = imm->imm_use->next;
#endif
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}

/* Bump IMM to the next use in the list.  */
static inline use_operand_p
next_readonly_imm_use (imm_use_iterator *imm)
{
  use_operand_p old = imm->imm_use;

#ifdef ENABLE_CHECKING
  /* If this assertion fails, it indicates the 'next' pointer has changed
     since the last bump.  This indicates that the list is being modified
     via stmt changes, or SET_USE, or somesuch thing, and you need to be
     using the SAFE version of the iterator.  */
  gcc_assert (imm->iter_node.next == old->next);
  imm->iter_node.next = old->next->next;
#endif

  imm->imm_use = old->next;
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}

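/* Read-only traversal of the immediate uses of an SSA name normally goes
   through the FOR_EACH_IMM_USE_FAST macro built on the two routines above.
   Illustrative sketch only; `name' is some SSA_NAME and `count' caller
   state:

     imm_use_iterator imm_iter;
     use_operand_p use_p;

     FOR_EACH_IMM_USE_FAST (use_p, imm_iter, name)
       count++;

   The use list must not be modified while iterating this way; use the
   stmt-level iterator further below for that.  */
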
extern bool has_zero_uses_1 (const ssa_use_operand_t *head);
extern bool single_imm_use_1 (const ssa_use_operand_t *head,
                              use_operand_p *use_p, gimple *stmt);

/* Return true if VAR has no nondebug uses.  */
static inline bool
has_zero_uses (const_tree var)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));

  /* A single use_operand means there are no items in the list.  */
  if (ptr == ptr->next)
    return true;

  /* If there are debug stmts, we have to look at each use and see
     whether there are any nondebug uses.  */
  if (!MAY_HAVE_DEBUG_STMTS)
    return false;

  return has_zero_uses_1 (ptr);
}

/* Return true if VAR has a single nondebug use.  */
static inline bool
has_single_use (const_tree var)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));

  /* If there aren't any uses whatsoever, we're done.  */
  if (ptr == ptr->next)
    return false;

  /* If there's a single use, check that it's not a debug stmt.  */
  if (ptr == ptr->next->next)
    return !is_gimple_debug (USE_STMT (ptr->next));

  /* If there are debug stmts, we have to look at each of them.  */
  if (!MAY_HAVE_DEBUG_STMTS)
    return false;

  return single_imm_use_1 (ptr, NULL, NULL);
}

/* If VAR has only a single immediate nondebug use, return true, and
   set USE_P and STMT to the use pointer and stmt of occurrence.  */
static inline bool
single_imm_use (const_tree var, use_operand_p *use_p, gimple *stmt)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));

  /* If there aren't any uses whatsoever, we're done.  */
  if (ptr == ptr->next)
    {
    return_false:
      *use_p = NULL_USE_OPERAND_P;
      *stmt = NULL;
      return false;
    }

  /* If there's a single use, check that it's not a debug stmt.  */
  if (ptr == ptr->next->next)
    {
      if (!is_gimple_debug (USE_STMT (ptr->next)))
        {
          *use_p = ptr->next;
          *stmt = ptr->next->loc.stmt;
          return true;
        }
      else
        goto return_false;
    }

  /* If there are debug stmts, we have to look at each of them.  */
  if (!MAY_HAVE_DEBUG_STMTS)
    goto return_false;

  return single_imm_use_1 (ptr, use_p, stmt);
}

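/* Typical caller pattern for single_imm_use (illustrative sketch; `lhs' is
   some SSA_NAME and `replace_use' a placeholder for caller logic):

     use_operand_p use_p;
     gimple use_stmt;

     if (single_imm_use (lhs, &use_p, &use_stmt))
       replace_use (use_p, use_stmt);

   On failure both *USE_P and *STMT are cleared, so they are safe to read
   either way.  */
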
/* Return the number of nondebug immediate uses of VAR.  */
static inline unsigned int
num_imm_uses (const_tree var)
{
  const ssa_use_operand_t *const start = &(SSA_NAME_IMM_USE_NODE (var));
  const ssa_use_operand_t *ptr;
  unsigned int num = 0;

  if (!MAY_HAVE_DEBUG_STMTS)
    for (ptr = start->next; ptr != start; ptr = ptr->next)
      num++;
  else
    for (ptr = start->next; ptr != start; ptr = ptr->next)
      if (!is_gimple_debug (USE_STMT (ptr)))
        num++;

  return num;
}

/* Return the tree pointed-to by USE.  */
static inline tree
get_use_from_ptr (use_operand_p use)
{
  return *(use->use);
}

/* Return the tree pointed-to by DEF.  */
static inline tree
get_def_from_ptr (def_operand_p def)
{
  return *def;
}

/* Return a use_operand_p pointer for argument I of PHI node GS.  */

static inline use_operand_p
gimple_phi_arg_imm_use_ptr (gimple gs, int i)
{
  return &gimple_phi_arg (gs, i)->imm_use;
}

/* Return the tree operand for argument INDEX of PHI node GS.  */

static inline tree
gimple_phi_arg_def (gimple gs, size_t index)
{
  struct phi_arg_d *pd = gimple_phi_arg (gs, index);
  return get_use_from_ptr (&pd->imm_use);
}

/* Return a pointer to the tree operand for argument INDEX of PHI node GS.  */

static inline tree *
gimple_phi_arg_def_ptr (gimple gs, size_t index)
{
  return &gimple_phi_arg (gs, index)->def;
}

/* Return the edge associated with argument I of phi node GS.  */

static inline edge
gimple_phi_arg_edge (gimple gs, size_t i)
{
  return EDGE_PRED (gimple_bb (gs), i);
}

/* Return the source location of gimple argument I of phi node GS.  */

static inline source_location
gimple_phi_arg_location (gimple gs, size_t i)
{
  return gimple_phi_arg (gs, i)->locus;
}

/* Return the source location of the argument on edge E of phi node GS.  */

static inline source_location
gimple_phi_arg_location_from_edge (gimple gs, edge e)
{
  return gimple_phi_arg (gs, e->dest_idx)->locus;
}

/* Set the source location of gimple argument I of phi node GS to LOC.  */

static inline void
gimple_phi_arg_set_location (gimple gs, size_t i, source_location loc)
{
  gimple_phi_arg (gs, i)->locus = loc;
}

/* Return TRUE if argument I of phi node GS has a location record.  */

static inline bool
gimple_phi_arg_has_location (gimple gs, size_t i)
{
  return gimple_phi_arg_location (gs, i) != UNKNOWN_LOCATION;
}

/* Return the PHI nodes for basic block BB, or NULL if there are no
   PHI nodes.  */
static inline gimple_seq
phi_nodes (const_basic_block bb)
{
  gcc_checking_assert (!(bb->flags & BB_RTL));
  return bb->il.gimple.phi_nodes;
}

static inline gimple_seq *
phi_nodes_ptr (basic_block bb)
{
  gcc_checking_assert (!(bb->flags & BB_RTL));
  return &bb->il.gimple.phi_nodes;
}

/* Set PHI nodes of a basic block BB to SEQ.  */

static inline void
set_phi_nodes (basic_block bb, gimple_seq seq)
{
  gimple_stmt_iterator i;

  gcc_checking_assert (!(bb->flags & BB_RTL));
  bb->il.gimple.phi_nodes = seq;
  if (seq)
    for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
      gimple_set_bb (gsi_stmt (i), bb);
}

/* Return the index of the phi argument which contains the specified use.  */

static inline int
phi_arg_index_from_use (use_operand_p use)
{
  struct phi_arg_d *element, *root;
  size_t index;
  gimple phi;

  /* Since the use is the first thing in a PHI argument element, we can
     calculate its index based on casting it to an argument, and performing
     pointer arithmetic.  */

  phi = USE_STMT (use);

  element = (struct phi_arg_d *)use;
  root = gimple_phi_arg (phi, 0);
  index = element - root;

  /* Make sure the calculation doesn't have any leftover bytes.  If it does,
     then imm_use is likely not the first element in phi_arg_d.  */
  gcc_checking_assert ((((char *)element - (char *)root)
                        % sizeof (struct phi_arg_d)) == 0
                       && index < gimple_phi_capacity (phi));

  return index;
}

/* Return true if T (assumed to be a DECL) is a global variable.
   A variable is considered global if its storage is not automatic.  */

static inline bool
is_global_var (const_tree t)
{
  return (TREE_STATIC (t) || DECL_EXTERNAL (t));
}

/* Return true if VAR may be aliased.  A variable is considered as
   maybe aliased if it has its address taken by the local TU
   or possibly by another TU and might be modified through a pointer.  */

static inline bool
may_be_aliased (const_tree var)
{
  return (TREE_CODE (var) != CONST_DECL
          && !((TREE_STATIC (var) || TREE_PUBLIC (var) || DECL_EXTERNAL (var))
               && TREE_READONLY (var)
               && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (var)))
          && (TREE_PUBLIC (var)
              || DECL_EXTERNAL (var)
              || TREE_ADDRESSABLE (var)));
}

/* PHI nodes should contain only ssa_names and invariants.  A test
   for ssa_name is definitely simpler; don't let invalid contents
   slip in in the meantime.  */

static inline bool
phi_ssa_name_p (const_tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return true;
  gcc_checking_assert (is_gimple_min_invariant (t));
  return false;
}

/* Returns the loop of the statement STMT.  */

static inline struct loop *
loop_containing_stmt (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  if (!bb)
    return NULL;

  return bb->loop_father;
}

/* ----------------------------------------------------------------------- */

/* The following set of routines is used to iterate over the various types
   of SSA operands.  */

/* Return true if PTR is finished iterating.  */
static inline bool
op_iter_done (const ssa_op_iter *ptr)
{
  return ptr->done;
}

/* Get the next iterator use value for PTR.  */
static inline use_operand_p
op_iter_next_use (ssa_op_iter *ptr)
{
  use_operand_p use_p;
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_use);
  if (ptr->uses)
    {
      use_p = USE_OP_PTR (ptr->uses);
      ptr->uses = ptr->uses->next;
      return use_p;
    }
  if (ptr->i < ptr->numops)
    {
      return PHI_ARG_DEF_PTR (ptr->stmt, (ptr->i)++);
    }
  ptr->done = true;
  return NULL_USE_OPERAND_P;
}

/* Get the next iterator def value for PTR.  */
static inline def_operand_p
op_iter_next_def (ssa_op_iter *ptr)
{
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_def);
  if (ptr->flags & SSA_OP_VDEF)
    {
      tree *p;
      ptr->flags &= ~SSA_OP_VDEF;
      p = gimple_vdef_ptr (ptr->stmt);
      if (p && *p)
        return p;
    }
  if (ptr->flags & SSA_OP_DEF)
    {
      while (ptr->i < ptr->numops)
        {
          tree *val = gimple_op_ptr (ptr->stmt, ptr->i);
          ptr->i++;
          /* GIMPLE_ASM outputs come wrapped in TREE_LIST nodes.  */
          if (TREE_CODE (*val) == TREE_LIST)
            val = &TREE_VALUE (*val);
          if (TREE_CODE (*val) == SSA_NAME
              || is_gimple_reg (*val))
            return val;
        }
      ptr->flags &= ~SSA_OP_DEF;
    }

  ptr->done = true;
  return NULL_DEF_OPERAND_P;
}

/* Get the next iterator tree value for PTR.  */
static inline tree
op_iter_next_tree (ssa_op_iter *ptr)
{
  tree val;
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_tree);
  if (ptr->uses)
    {
      val = USE_OP (ptr->uses);
      ptr->uses = ptr->uses->next;
      return val;
    }
  if (ptr->flags & SSA_OP_VDEF)
    {
      ptr->flags &= ~SSA_OP_VDEF;
      if ((val = gimple_vdef (ptr->stmt)))
        return val;
    }
  if (ptr->flags & SSA_OP_DEF)
    {
      while (ptr->i < ptr->numops)
        {
          val = gimple_op (ptr->stmt, ptr->i);
          ptr->i++;
          /* GIMPLE_ASM outputs come wrapped in TREE_LIST nodes.  */
          if (TREE_CODE (val) == TREE_LIST)
            val = TREE_VALUE (val);
          if (TREE_CODE (val) == SSA_NAME
              || is_gimple_reg (val))
            return val;
        }
      ptr->flags &= ~SSA_OP_DEF;
    }

  ptr->done = true;
  return NULL_TREE;
}

/* This function clears the iterator PTR, and marks it done.  This is normally
   used to prevent warnings about possibly uninitialized components.  */

static inline void
clear_and_done_ssa_iter (ssa_op_iter *ptr)
{
  ptr->i = 0;
  ptr->numops = 0;
  ptr->uses = NULL;
  ptr->iter_type = ssa_op_iter_none;
  ptr->stmt = NULL;
  ptr->done = true;
  ptr->flags = 0;
}

/* Initialize the iterator PTR to iterate over the operands of STMT
   selected by FLAGS.  */
static inline void
op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
{
  /* PHI nodes require a different iterator initialization path.  We
     do not support iterating over virtual defs or uses without
     iterating over defs or uses at the same time.  */
  gcc_checking_assert (gimple_code (stmt) != GIMPLE_PHI
                       && (!(flags & SSA_OP_VDEF) || (flags & SSA_OP_DEF))
                       && (!(flags & SSA_OP_VUSE) || (flags & SSA_OP_USE)));
  ptr->numops = 0;
  if (flags & (SSA_OP_DEF | SSA_OP_VDEF))
    {
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
        case GIMPLE_CALL:
          ptr->numops = 1;
          break;
        case GIMPLE_ASM:
          ptr->numops = gimple_asm_noutputs (stmt);
          break;
        default:
          ptr->numops = 0;
          flags &= ~(SSA_OP_DEF | SSA_OP_VDEF);
          break;
        }
    }
  ptr->uses = (flags & (SSA_OP_USE|SSA_OP_VUSE)) ? gimple_use_ops (stmt) : NULL;
  if (!(flags & SSA_OP_VUSE)
      && ptr->uses
      && gimple_vuse (stmt) != NULL_TREE)
    ptr->uses = ptr->uses->next;
  ptr->done = false;
  ptr->i = 0;

  ptr->stmt = stmt;
  ptr->flags = flags;
}

/* Initialize iterator PTR to the use operands in STMT based on FLAGS.  Return
   the first use.  */
static inline use_operand_p
op_iter_init_use (ssa_op_iter *ptr, gimple stmt, int flags)
{
  gcc_checking_assert ((flags & SSA_OP_ALL_DEFS) == 0
                       && (flags & SSA_OP_USE));
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_use;
  return op_iter_next_use (ptr);
}

/* Initialize iterator PTR to the def operands in STMT based on FLAGS.  Return
   the first def.  */
static inline def_operand_p
op_iter_init_def (ssa_op_iter *ptr, gimple stmt, int flags)
{
  gcc_checking_assert ((flags & SSA_OP_ALL_USES) == 0
                       && (flags & SSA_OP_DEF));
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_def;
  return op_iter_next_def (ptr);
}

/* Initialize iterator PTR to the operands in STMT based on FLAGS.  Return
   the first operand as a tree.  */
static inline tree
op_iter_init_tree (ssa_op_iter *ptr, gimple stmt, int flags)
{
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_tree;
  return op_iter_next_tree (ptr);
}

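/* These initializers are rarely called directly; the companion
   FOR_EACH_SSA_* macros wrap them.  Illustrative sketch only; `stmt' is
   some non-PHI gimple statement and `note_use' caller code:

     tree var;
     ssa_op_iter iter;

     FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_USES)
       note_use (var);
*/
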
/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   return NULL_TREE.  */
static inline tree
single_ssa_tree_operand (gimple stmt, int flags)
{
  tree var;
  ssa_op_iter iter;

  var = op_iter_init_tree (&iter, stmt, flags);
  if (op_iter_done (&iter))
    return NULL_TREE;
  op_iter_next_tree (&iter);
  if (op_iter_done (&iter))
    return var;
  return NULL_TREE;
}

/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   return NULL_USE_OPERAND_P.  */
static inline use_operand_p
single_ssa_use_operand (gimple stmt, int flags)
{
  use_operand_p var;
  ssa_op_iter iter;

  var = op_iter_init_use (&iter, stmt, flags);
  if (op_iter_done (&iter))
    return NULL_USE_OPERAND_P;
  op_iter_next_use (&iter);
  if (op_iter_done (&iter))
    return var;
  return NULL_USE_OPERAND_P;
}

/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   return NULL_DEF_OPERAND_P.  */
static inline def_operand_p
single_ssa_def_operand (gimple stmt, int flags)
{
  def_operand_p var;
  ssa_op_iter iter;

  var = op_iter_init_def (&iter, stmt, flags);
  if (op_iter_done (&iter))
    return NULL_DEF_OPERAND_P;
  op_iter_next_def (&iter);
  if (op_iter_done (&iter))
    return var;
  return NULL_DEF_OPERAND_P;
}

/* Return true if there are zero operands in STMT matching the type
   given in FLAGS.  */
static inline bool
zero_ssa_operands (gimple stmt, int flags)
{
  ssa_op_iter iter;

  op_iter_init_tree (&iter, stmt, flags);
  return op_iter_done (&iter);
}

/* Return the number of operands matching FLAGS in STMT.  */
static inline int
num_ssa_operands (gimple stmt, int flags)
{
  ssa_op_iter iter;
  tree t;
  int num = 0;

  gcc_checking_assert (gimple_code (stmt) != GIMPLE_PHI);
  FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, flags)
    num++;
  return num;
}

static inline use_operand_p
op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags);

/* Delink all immediate_use information for STMT.  */
static inline void
delink_stmt_imm_use (gimple stmt)
{
  ssa_op_iter iter;
  use_operand_p use_p;

  if (ssa_operands_active (cfun))
    FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_ALL_USES)
      delink_imm_use (use_p);
}

/* If there is a single DEF in the PHI node which matches FLAGS, return it.
   Otherwise return NULL_TREE.  */
static inline tree
single_phi_def (gimple stmt, int flags)
{
  tree def = PHI_RESULT (stmt);
  if ((flags & SSA_OP_DEF) && is_gimple_reg (def))
    return def;
  if ((flags & SSA_OP_VIRTUAL_DEFS) && !is_gimple_reg (def))
    return def;
  return NULL_TREE;
}

/* Initialize the iterator PTR for uses matching FLAGS in PHI.  FLAGS must
   include SSA_OP_USE or SSA_OP_VIRTUAL_USES.  */
static inline use_operand_p
op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags)
{
  tree phi_def = gimple_phi_result (phi);
  int comp;

  clear_and_done_ssa_iter (ptr);
  ptr->done = false;

  gcc_checking_assert ((flags & (SSA_OP_USE | SSA_OP_VIRTUAL_USES)) != 0);

  comp = (is_gimple_reg (phi_def) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

  /* If the PHI node doesn't have the operand type we care about,
     we're done.  */
  if ((flags & comp) == 0)
    {
      ptr->done = true;
      return NULL_USE_OPERAND_P;
    }

  ptr->stmt = phi;
  ptr->numops = gimple_phi_num_args (phi);
  ptr->iter_type = ssa_op_iter_use;
  ptr->flags = flags;
  return op_iter_next_use (ptr);
}

/* Start an iterator for a PHI definition.  */

static inline def_operand_p
op_iter_init_phidef (ssa_op_iter *ptr, gimple phi, int flags)
{
  tree phi_def = PHI_RESULT (phi);
  int comp;

  clear_and_done_ssa_iter (ptr);
  ptr->done = false;

  gcc_checking_assert ((flags & (SSA_OP_DEF | SSA_OP_VIRTUAL_DEFS)) != 0);

  comp = (is_gimple_reg (phi_def) ? SSA_OP_DEF : SSA_OP_VIRTUAL_DEFS);

  /* If the PHI node doesn't have the operand type we care about,
     we're done.  */
  if ((flags & comp) == 0)
    {
      ptr->done = true;
      return NULL_DEF_OPERAND_P;
    }

  ptr->iter_type = ssa_op_iter_def;
  /* The first call to op_iter_next_def will terminate the iterator since
     all the fields are NULL.  Simply return the result here as the first and
     therefore only result.  */
  return PHI_RESULT_PTR (phi);
}

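/* PHI operands are walked with the same FOR_EACH_* wrappers as statement
   operands.  Illustrative sketch only; `phi' is some GIMPLE_PHI and
   `note_arg' caller code:

     use_operand_p arg_p;
     ssa_op_iter iter;

     FOR_EACH_PHI_ARG (arg_p, phi, iter, SSA_OP_USE)
       note_arg (USE_FROM_PTR (arg_p));
*/
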
/* Return true if IMM has reached the end of the immediate use stmt list.  */

static inline bool
end_imm_use_stmt_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == imm->end_p);
}

/* Finish the traversal of an immediate use stmt list IMM by removing the
   placeholder node from the list.  */

static inline void
end_imm_use_stmt_traverse (imm_use_iterator *imm)
{
  delink_imm_use (&(imm->iter_node));
}

/* Immediate use traversal of uses within a stmt requires that all the
   uses on a stmt be sequentially listed.  This routine is used to build up
   this sequential list by adding USE_P to the end of the current list
   currently delimited by HEAD and LAST_P.  The new LAST_P value is
   returned.  */

static inline use_operand_p
move_use_after_head (use_operand_p use_p, use_operand_p head,
                     use_operand_p last_p)
{
  gcc_checking_assert (USE_FROM_PTR (use_p) == USE_FROM_PTR (head));
  /* Skip head when we find it.  */
  if (use_p != head)
    {
      /* If use_p is already linked in after last_p, continue.  */
      if (last_p->next == use_p)
        last_p = use_p;
      else
        {
          /* Delink from current location, and link in at last_p.  */
          delink_imm_use (use_p);
          link_imm_use_to_list (use_p, last_p);
          last_p = use_p;
        }
    }
  return last_p;
}

/* This routine will relink all uses with the same stmt as HEAD into the list
   immediately following HEAD for iterator IMM.  */

static inline void
link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
{
  use_operand_p use_p;
  use_operand_p last_p = head;
  gimple head_stmt = USE_STMT (head);
  tree use = USE_FROM_PTR (head);
  ssa_op_iter op_iter;
  int flag;

  /* Only look at virtual or real uses, depending on the type of HEAD.  */
  flag = (is_gimple_reg (use) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

  if (gimple_code (head_stmt) == GIMPLE_PHI)
    {
      FOR_EACH_PHI_ARG (use_p, head_stmt, op_iter, flag)
        if (USE_FROM_PTR (use_p) == use)
          last_p = move_use_after_head (use_p, head, last_p);
    }
  else
    {
      if (flag == SSA_OP_USE)
        {
          FOR_EACH_SSA_USE_OPERAND (use_p, head_stmt, op_iter, flag)
            if (USE_FROM_PTR (use_p) == use)
              last_p = move_use_after_head (use_p, head, last_p);
        }
      else if ((use_p = gimple_vuse_op (head_stmt)) != NULL_USE_OPERAND_P)
        {
          if (USE_FROM_PTR (use_p) == use)
            last_p = move_use_after_head (use_p, head, last_p);
        }
    }
  /* Link iter node in after last_p.  */
  if (imm->iter_node.prev != NULL)
    delink_imm_use (&imm->iter_node);
  link_imm_use_to_list (&(imm->iter_node), last_p);
}

/* Initialize IMM to traverse over uses of VAR.  Return the first statement.  */
static inline gimple
first_imm_use_stmt (imm_use_iterator *imm, tree var)
{
  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
  imm->next_imm_name = NULL_USE_OPERAND_P;

  /* iter_node is used as a marker within the immediate use list to indicate
     where the end of the current stmt's uses are.  Initialize it to NULL
     stmt and use, which indicates a marker node.  */
  imm->iter_node.prev = NULL_USE_OPERAND_P;
  imm->iter_node.next = NULL_USE_OPERAND_P;
  imm->iter_node.loc.stmt = NULL;
  imm->iter_node.use = NULL;

  if (end_imm_use_stmt_p (imm))
    return NULL;

  link_use_stmts_after (imm->imm_use, imm);

  return USE_STMT (imm->imm_use);
}

/* Bump IMM to the next stmt which has a use of VAR.  */

static inline gimple
next_imm_use_stmt (imm_use_iterator *imm)
{
  imm->imm_use = imm->iter_node.next;
  if (end_imm_use_stmt_p (imm))
    {
      if (imm->iter_node.prev != NULL)
        delink_imm_use (&imm->iter_node);
      return NULL;
    }

  link_use_stmts_after (imm->imm_use, imm);
  return USE_STMT (imm->imm_use);
}

/* This routine will return the first use on the stmt IMM currently refers
   to.  */

static inline use_operand_p
first_imm_use_on_stmt (imm_use_iterator *imm)
{
  imm->next_imm_name = imm->imm_use->next;
  return imm->imm_use;
}

/* Return TRUE if the last use on the stmt IMM refers to has been visited.  */

static inline bool
end_imm_use_on_stmt_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == &(imm->iter_node));
}

/* Bump to the next use on the stmt IMM refers to, return NULL if done.  */

static inline use_operand_p
next_imm_use_on_stmt (imm_use_iterator *imm)
{
  imm->imm_use = imm->next_imm_name;
  if (end_imm_use_on_stmt_p (imm))
    return NULL_USE_OPERAND_P;
  else
    {
      imm->next_imm_name = imm->imm_use->next;
      return imm->imm_use;
    }
}

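/* The stmt-level and use-level routines above are normally driven through
   the FOR_EACH_IMM_USE_STMT / FOR_EACH_IMM_USE_ON_STMT macro pair, which,
   unlike the FAST iterator, tolerates modification of the uses being
   visited.  Illustrative sketch only; `name' is some SSA_NAME and `newval'
   a replacement tree from the caller:

     gimple use_stmt;
     imm_use_iterator imm_iter;
     use_operand_p use_p;

     FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, name)
       {
         FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
           SET_USE (use_p, newval);
         update_stmt (use_stmt);
       }

   Early exits from the outer loop must go through BREAK_FROM_IMM_USE_STMT
   so the iterator's placeholder node is removed from the list.  */
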
/* Return true if VAR cannot be modified by the program.  */

static inline bool
unmodifiable_var_p (const_tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);

  return TREE_READONLY (var) && (TREE_STATIC (var) || DECL_EXTERNAL (var));
}

/* Return true if REF, a handled component reference, has an ARRAY_REF
   somewhere in it.  */

static inline bool
ref_contains_array_ref (const_tree ref)
{
  gcc_checking_assert (handled_component_p (ref));

  do {
    if (TREE_CODE (ref) == ARRAY_REF)
      return true;
    ref = TREE_OPERAND (ref, 0);
  } while (handled_component_p (ref));

  return false;
}

/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
        return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}

/* Return true, if the two ranges [POS1, SIZE1] and [POS2, SIZE2]
   overlap.  SIZE1 and/or SIZE2 can be (unsigned)-1 in which case the
   range is open-ended.  Otherwise return false.  */

static inline bool
ranges_overlap_p (unsigned HOST_WIDE_INT pos1,
                  unsigned HOST_WIDE_INT size1,
                  unsigned HOST_WIDE_INT pos2,
                  unsigned HOST_WIDE_INT size2)
{
  if (pos1 >= pos2
      && (size2 == (unsigned HOST_WIDE_INT)-1
          || pos1 < (pos2 + size2)))
    return true;
  if (pos2 >= pos1
      && (size1 == (unsigned HOST_WIDE_INT)-1
          || pos2 < (pos1 + size1)))
    return true;

  return false;
}

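/* Worked example: each range covers bytes [POS, POS + SIZE).  So
   ranges_overlap_p (0, 4, 2, 4) is true (bytes 2 and 3 are shared), while
   ranges_overlap_p (0, 4, 4, 4) is false because the ranges are merely
   adjacent.  With SIZE2 == (unsigned HOST_WIDE_INT)-1, any POS1 >= POS2
   overlaps the open-ended second range.  */
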
/* Accessor to tree-ssa-operands.c caches.  */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}

/* Given an edge_var_map V, return the PHI arg definition.  */

static inline tree
redirect_edge_var_map_def (edge_var_map *v)
{
  return v->def;
}

/* Given an edge_var_map V, return the PHI result.  */

static inline tree
redirect_edge_var_map_result (edge_var_map *v)
{
  return v->result;
}

/* Given an edge_var_map V, return the PHI arg location.  */

static inline source_location
redirect_edge_var_map_location (edge_var_map *v)
{
  return v->locus;
}

/* Return an SSA_NAME node for variable VAR defined in statement STMT
   in function cfun.  */

static inline tree
make_ssa_name (tree var, gimple stmt)
{
  return make_ssa_name_fn (cfun, var, stmt);
}

/* Return an SSA_NAME node using the template SSA name NAME defined in
   statement STMT in function cfun.  */

static inline tree
copy_ssa_name (tree var, gimple stmt)
{
  return copy_ssa_name_fn (cfun, var, stmt);
}

/* Creates a duplicate of SSA name NAME to be defined by statement STMT
   in function cfun.  */

static inline tree
duplicate_ssa_name (tree var, gimple stmt)
{
  return duplicate_ssa_name_fn (cfun, var, stmt);
}

/* Return an anonymous SSA_NAME node for type TYPE defined in statement STMT
   in function cfun.  Arrange so that it uses NAME in dumps.  */

static inline tree
make_temp_ssa_name (tree type, gimple stmt, const char *name)
{
  tree ssa_name;
  gcc_checking_assert (TYPE_P (type));
  ssa_name = make_ssa_name_fn (cfun, type, stmt);
  SET_SSA_NAME_VAR_OR_IDENTIFIER (ssa_name, get_identifier (name));
  return ssa_name;
}

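/* Typical use when a pass needs a fresh temporary (illustrative sketch;
   `val' is some existing tree value from the caller):

     tree tem = make_temp_ssa_name (TREE_TYPE (val), NULL, "cseval");
     gimple new_stmt = gimple_build_assign (tem, val);

   followed by inserting NEW_STMT at the appropriate spot.  */
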
/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.
   VALUEIZE if non-NULL is used to valueize SSA names.  It should return
   its argument or a constant if the argument is known to be constant.  */
/* ??? This is a static inline here to avoid the overhead of the indirect calls
   to VALUEIZE.  But is this overhead really that significant?  And should we
   perhaps just rely on WHOPR to specialize the function?  */

static inline tree
get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset,
                                 tree (*valueize) (tree))
{
  HOST_WIDE_INT byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case BIT_FIELD_REF:
          return NULL_TREE;

        case COMPONENT_REF:
          {
            tree field = TREE_OPERAND (exp, 1);
            tree this_offset = component_ref_field_offset (exp);
            HOST_WIDE_INT hthis_offset;

            if (!this_offset
                || TREE_CODE (this_offset) != INTEGER_CST
                || (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
                    % BITS_PER_UNIT))
              return NULL_TREE;

            hthis_offset = TREE_INT_CST_LOW (this_offset);
            hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
                             / BITS_PER_UNIT);
            byte_offset += hthis_offset;
          }
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree index = TREE_OPERAND (exp, 1);
            tree low_bound, unit_size;

            if (valueize
                && TREE_CODE (index) == SSA_NAME)
              index = (*valueize) (index);

            /* If the resulting bit-offset is constant, track it.  */
            if (TREE_CODE (index) == INTEGER_CST
                && (low_bound = array_ref_low_bound (exp),
                    TREE_CODE (low_bound) == INTEGER_CST)
                && (unit_size = array_ref_element_size (exp),
                    TREE_CODE (unit_size) == INTEGER_CST))
              {
                HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);

                hindex -= TREE_INT_CST_LOW (low_bound);
                hindex *= TREE_INT_CST_LOW (unit_size);
                byte_offset += hindex;
              }
            else
              return NULL_TREE;
          }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case MEM_REF:
          {
            tree base = TREE_OPERAND (exp, 0);
            if (valueize
                && TREE_CODE (base) == SSA_NAME)
              base = (*valueize) (base);

            /* Hand back the decl for MEM[&decl, off].  */
            if (TREE_CODE (base) == ADDR_EXPR)
              {
                if (!integer_zerop (TREE_OPERAND (exp, 1)))
                  {
                    double_int off = mem_ref_offset (exp);
                    gcc_assert (off.high == -1 || off.high == 0);
                    byte_offset += off.to_shwi ();
                  }
                exp = TREE_OPERAND (base, 0);
              }
            goto done;
          }

        case TARGET_MEM_REF:
          {
            tree base = TREE_OPERAND (exp, 0);
            if (valueize
                && TREE_CODE (base) == SSA_NAME)
              base = (*valueize) (base);

            /* Hand back the decl for MEM[&decl, off].  */
            if (TREE_CODE (base) == ADDR_EXPR)
              {
                if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
                  return NULL_TREE;
                if (!integer_zerop (TMR_OFFSET (exp)))
                  {
                    double_int off = mem_ref_offset (exp);
                    gcc_assert (off.high == -1 || off.high == 0);
                    byte_offset += off.to_shwi ();
                  }
                exp = TREE_OPERAND (base, 0);
              }
            goto done;
          }

        default:
          goto done;
        }

      exp = TREE_OPERAND (exp, 0);
    }
done:

  *poffset = byte_offset;
  return exp;
}

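/* A caller that only wants the constant-offset case might look like this
   (illustrative sketch; `ref' is some memory reference tree):

     HOST_WIDE_INT offset;
     tree base = get_addr_base_and_unit_offset_1 (ref, &offset, NULL);

     if (base)
       ... base plus `offset' bytes is the start of the access ...

   Passing NULL for VALUEIZE means SSA names are taken at face value.  */
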
#endif /* _TREE_FLOW_INLINE_H */