/* Inline functions for tree-flow.h
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef _TREE_FLOW_INLINE_H
#define _TREE_FLOW_INLINE_H 1

/* Inline functions for manipulating various data structures defined in
   tree-flow.h.  See tree-flow.h for documentation.  */

/* Return true when gimple SSA form was built.
   gimple_in_ssa_p is queried by the gimplifier in various early stages
   before the SSA infrastructure is initialized, so check for the presence
   of the datastructures first.  */
static inline bool
gimple_in_ssa_p (const struct function *fun)
{
  return fun && fun->gimple_df && fun->gimple_df->in_ssa_p;
}

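/* A minimal usage sketch (hypothetical caller): passes typically guard
   SSA-only queries on the current function this way:

     if (gimple_in_ssa_p (cfun))
       process_vops (gimple_vop (cfun));

   `process_vops' is a placeholder; gimple_vop is defined just below.  */
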
/* Artificial variable used for the virtual operand FUD chain.  */
static inline tree
gimple_vop (const struct function *fun)
{
  gcc_checking_assert (fun && fun->gimple_df);
  return fun->gimple_df->vop;
}

/* Initialize the hashtable iterator HTI to point to hashtable TABLE.  */

static inline void *
first_htab_element (htab_iterator *hti, htab_t table)
{
  hti->htab = table;
  hti->slot = table->entries;
  hti->limit = hti->slot + htab_size (table);
  do
    {
      PTR x = *(hti->slot);
      if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
        break;
    } while (++(hti->slot) < hti->limit);

  if (hti->slot < hti->limit)
    return *(hti->slot);
  return NULL;
}

/* Return true if the hashtable iterator HTI has reached the end, i.e.
   there is no current non-empty/deleted slot.  */

static inline bool
end_htab_p (const htab_iterator *hti)
{
  if (hti->slot >= hti->limit)
    return true;
  return false;
}

/* Advance the hashtable iterator pointed to by HTI to the next element of
   the hashtable.  */

static inline void *
next_htab_element (htab_iterator *hti)
{
  while (++(hti->slot) < hti->limit)
    {
      PTR x = *(hti->slot);
      if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
        return x;
    }
  return NULL;
}

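/* Usage sketch: these three routines are normally driven through the
   FOR_EACH_HTAB_ELEMENT macro from tree-flow.h rather than called
   directly.  A hypothetical traversal, assuming that macro's
   (HTAB, RESULT, TYPE, ITER) argument order; `table' and `process'
   are placeholders:

     htab_iterator hti;
     tree var;
     FOR_EACH_HTAB_ELEMENT (table, var, tree, hti)
       process (var);
*/
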
/* Get the number of the next statement uid to be allocated.  */
static inline unsigned int
gimple_stmt_max_uid (struct function *fn)
{
  return fn->last_stmt_uid;
}

/* Set the number of the next statement uid to be allocated.  */
static inline void
set_gimple_stmt_max_uid (struct function *fn, unsigned int maxid)
{
  fn->last_stmt_uid = maxid;
}

/* Allocate the next statement uid and bump the counter.  */
static inline unsigned int
inc_gimple_stmt_max_uid (struct function *fn)
{
  return fn->last_stmt_uid++;
}

/* Return the line number for EXPR, or return -1 if we have no line
   number information for it.  */
static inline int
get_lineno (const_gimple stmt)
{
  location_t loc;

  if (!stmt)
    return -1;

  loc = gimple_location (stmt);
  if (loc == UNKNOWN_LOCATION)
    return -1;

  return LOCATION_LINE (loc);
}

/* Delink an immediate_uses node from its chain.  */
static inline void
delink_imm_use (ssa_use_operand_t *linknode)
{
  /* Return if this node is not in a list.  */
  if (linknode->prev == NULL)
    return;

  linknode->prev->next = linknode->next;
  linknode->next->prev = linknode->prev;
  linknode->prev = NULL;
  linknode->next = NULL;
}

/* Link ssa_imm_use node LINKNODE into the chain for LIST.  */
static inline void
link_imm_use_to_list (ssa_use_operand_t *linknode, ssa_use_operand_t *list)
{
  /* Link the new node at the head of the list.  If we are in the process of
     traversing the list, we won't visit any new nodes added to it.  */
  linknode->prev = list;
  linknode->next = list->next;
  list->next->prev = linknode;
  list->next = linknode;
}

/* Link ssa_imm_use node LINKNODE into the chain for DEF.  */
static inline void
link_imm_use (ssa_use_operand_t *linknode, tree def)
{
  ssa_use_operand_t *root;

  if (!def || TREE_CODE (def) != SSA_NAME)
    linknode->prev = NULL;
  else
    {
      root = &(SSA_NAME_IMM_USE_NODE (def));
      gcc_checking_assert (*(linknode->use) == def);
      link_imm_use_to_list (linknode, root);
    }
}

/* Set the value of a use pointed to by USE to VAL.  */
static inline void
set_ssa_use_from_ptr (use_operand_p use, tree val)
{
  delink_imm_use (use);
  *(use->use) = val;
  link_imm_use (use, val);
}

/* Link ssa_imm_use node LINKNODE into the chain for DEF, with use occurring
   in STMT.  */
static inline void
link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, gimple stmt)
{
  if (stmt)
    link_imm_use (linknode, def);
  else
    link_imm_use (linknode, NULL);
  linknode->loc.stmt = stmt;
}

/* Relink a new node in place of an old node in the list.  */
static inline void
relink_imm_use (ssa_use_operand_t *node, ssa_use_operand_t *old)
{
  /* The new node had better be in the same list as the old one.  */
  gcc_checking_assert (*(old->use) == *(node->use));
  node->prev = old->prev;
  node->next = old->next;
  if (old->prev)
    {
      old->prev->next = node;
      old->next->prev = node;
      /* Remove the old node from the list.  */
      old->prev = NULL;
    }
}

/* Relink ssa_imm_use node LINKNODE into the chain for OLD, with use occurring
   in STMT.  */
static inline void
relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old,
                     gimple stmt)
{
  if (stmt)
    relink_imm_use (linknode, old);
  else
    link_imm_use (linknode, NULL);
  linknode->loc.stmt = stmt;
}

/* Return true if IMM has reached the end of the immediate use list.  */
static inline bool
end_readonly_imm_use_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == imm->end_p);
}

/* Initialize iterator IMM to process the list for VAR.  */
static inline use_operand_p
first_readonly_imm_use (imm_use_iterator *imm, tree var)
{
  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
#ifdef ENABLE_CHECKING
  imm->iter_node.next = imm->imm_use->next;
#endif
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}

/* Bump IMM to the next use in the list.  */
static inline use_operand_p
next_readonly_imm_use (imm_use_iterator *imm)
{
  use_operand_p old = imm->imm_use;

#ifdef ENABLE_CHECKING
  /* If this assertion fails, it indicates the 'next' pointer has changed
     since the last bump.  This indicates that the list is being modified
     via stmt changes, or SET_USE, or somesuch thing, and you need to be
     using the SAFE version of the iterator.  */
  gcc_assert (imm->iter_node.next == old->next);
  imm->iter_node.next = old->next->next;
#endif

  imm->imm_use = old->next;
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}

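/* Usage sketch: this read-only pair is normally driven through the
   FOR_EACH_IMM_USE_FAST macro.  A hypothetical count of the nondebug
   uses of an SSA name VAR (mirroring num_imm_uses below):

     imm_use_iterator iter;
     use_operand_p use_p;
     unsigned n = 0;
     FOR_EACH_IMM_USE_FAST (use_p, iter, var)
       if (!is_gimple_debug (USE_STMT (use_p)))
         n++;

   The use list must not be modified during this traversal; use the
   stmt-grouped iterator further below when rewriting uses.  */
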
extern bool has_zero_uses_1 (const ssa_use_operand_t *head);
extern bool single_imm_use_1 (const ssa_use_operand_t *head,
                              use_operand_p *use_p, gimple *stmt);

/* Return true if VAR has no nondebug uses.  */
static inline bool
has_zero_uses (const_tree var)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));

  /* A single use_operand means there are no items in the list.  */
  if (ptr == ptr->next)
    return true;

  /* If there are debug stmts, we have to look at each use and see
     whether there are any nondebug uses.  */
  if (!MAY_HAVE_DEBUG_STMTS)
    return false;

  return has_zero_uses_1 (ptr);
}

/* Return true if VAR has a single nondebug use.  */
static inline bool
has_single_use (const_tree var)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));

  /* If there aren't any uses whatsoever, we're done.  */
  if (ptr == ptr->next)
    return false;

  /* If there's a single use, check that it's not a debug stmt.  */
  if (ptr == ptr->next->next)
    return !is_gimple_debug (USE_STMT (ptr->next));

  /* If there are debug stmts, we have to look at each of them.  */
  if (!MAY_HAVE_DEBUG_STMTS)
    return false;

  return single_imm_use_1 (ptr, NULL, NULL);
}

/* If VAR has only a single immediate nondebug use, return true, and
   set USE_P and STMT to the use pointer and stmt of occurrence.  */
static inline bool
single_imm_use (const_tree var, use_operand_p *use_p, gimple *stmt)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));

  /* If there aren't any uses whatsoever, we're done.  */
  if (ptr == ptr->next)
    {
    return_false:
      *use_p = NULL_USE_OPERAND_P;
      *stmt = NULL;
      return false;
    }

  /* If there's a single use, check that it's not a debug stmt.  */
  if (ptr == ptr->next->next)
    {
      if (!is_gimple_debug (USE_STMT (ptr->next)))
        {
          *use_p = ptr->next;
          *stmt = ptr->next->loc.stmt;
          return true;
        }
      else
        goto return_false;
    }

  /* If there are debug stmts, we have to look at each of them.  */
  if (!MAY_HAVE_DEBUG_STMTS)
    goto return_false;

  return single_imm_use_1 (ptr, use_p, stmt);
}

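/* Usage sketch (hypothetical caller): a propagation pass might check
   whether LHS feeds exactly one nondebug statement and rewrite that
   use in place:

     use_operand_p use_p;
     gimple use_stmt;
     if (single_imm_use (lhs, &use_p, &use_stmt))
       SET_USE (use_p, replacement);

   `lhs' and `replacement' are placeholders for illustration.  */
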
/* Return the number of nondebug immediate uses of VAR.  */
static inline unsigned int
num_imm_uses (const_tree var)
{
  const ssa_use_operand_t *const start = &(SSA_NAME_IMM_USE_NODE (var));
  const ssa_use_operand_t *ptr;
  unsigned int num = 0;

  if (!MAY_HAVE_DEBUG_STMTS)
    for (ptr = start->next; ptr != start; ptr = ptr->next)
      num++;
  else
    for (ptr = start->next; ptr != start; ptr = ptr->next)
      if (!is_gimple_debug (USE_STMT (ptr)))
        num++;

  return num;
}

/* Return the tree pointed-to by USE.  */
static inline tree
get_use_from_ptr (use_operand_p use)
{
  return *(use->use);
}

/* Return the tree pointed-to by DEF.  */
static inline tree
get_def_from_ptr (def_operand_p def)
{
  return *def;
}

/* Return a use_operand_p pointer for argument I of PHI node GS.  */

static inline use_operand_p
gimple_phi_arg_imm_use_ptr (gimple gs, int i)
{
  return &gimple_phi_arg (gs, i)->imm_use;
}

/* Return the tree operand for argument INDEX of PHI node GS.  */

static inline tree
gimple_phi_arg_def (gimple gs, size_t index)
{
  struct phi_arg_d *pd = gimple_phi_arg (gs, index);
  return get_use_from_ptr (&pd->imm_use);
}

/* Return a pointer to the tree operand for argument INDEX of PHI node GS.  */

static inline tree *
gimple_phi_arg_def_ptr (gimple gs, size_t index)
{
  return &gimple_phi_arg (gs, index)->def;
}

/* Return the edge associated with argument I of phi node GS.  */

static inline edge
gimple_phi_arg_edge (gimple gs, size_t i)
{
  return EDGE_PRED (gimple_bb (gs), i);
}

/* Return the source location of gimple argument I of phi node GS.  */

static inline source_location
gimple_phi_arg_location (gimple gs, size_t i)
{
  return gimple_phi_arg (gs, i)->locus;
}

/* Return the source location of the argument on edge E of phi node GS.  */

static inline source_location
gimple_phi_arg_location_from_edge (gimple gs, edge e)
{
  return gimple_phi_arg (gs, e->dest_idx)->locus;
}

/* Set the source location of gimple argument I of phi node GS to LOC.  */

static inline void
gimple_phi_arg_set_location (gimple gs, size_t i, source_location loc)
{
  gimple_phi_arg (gs, i)->locus = loc;
}

/* Return TRUE if argument I of phi node GS has a location record.  */

static inline bool
gimple_phi_arg_has_location (gimple gs, size_t i)
{
  return gimple_phi_arg_location (gs, i) != UNKNOWN_LOCATION;
}

/* Return the PHI nodes for basic block BB, or NULL if there are no
   PHI nodes.  */
static inline gimple_seq
phi_nodes (const_basic_block bb)
{
  gcc_checking_assert (!(bb->flags & BB_RTL));
  return bb->il.gimple.phi_nodes;
}

static inline gimple_seq *
phi_nodes_ptr (basic_block bb)
{
  gcc_checking_assert (!(bb->flags & BB_RTL));
  return &bb->il.gimple.phi_nodes;
}

/* Set PHI nodes of a basic block BB to SEQ.  */

static inline void
set_phi_nodes (basic_block bb, gimple_seq seq)
{
  gimple_stmt_iterator i;

  gcc_checking_assert (!(bb->flags & BB_RTL));
  bb->il.gimple.phi_nodes = seq;
  if (seq)
    for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
      gimple_set_bb (gsi_stmt (i), bb);
}

/* Return the index of the phi argument containing the specified use.  */

static inline int
phi_arg_index_from_use (use_operand_p use)
{
  struct phi_arg_d *element, *root;
  size_t index;
  gimple phi;

  /* Since the use is the first thing in a PHI argument element, we can
     calculate its index based on casting it to an argument, and performing
     pointer arithmetic.  */

  phi = USE_STMT (use);

  element = (struct phi_arg_d *)use;
  root = gimple_phi_arg (phi, 0);
  index = element - root;

  /* Make sure the calculation doesn't have any leftover bytes.  If it does,
     then imm_use is likely not the first element in phi_arg_d.  */
  gcc_checking_assert ((((char *)element - (char *)root)
                        % sizeof (struct phi_arg_d)) == 0
                       && index < gimple_phi_capacity (phi));

  return index;
}

/* Return true if T (assumed to be a DECL) is a global variable.
   A variable is considered global if its storage is not automatic.  */

static inline bool
is_global_var (const_tree t)
{
  return (TREE_STATIC (t) || DECL_EXTERNAL (t));
}

/* Return true if VAR may be aliased.  A variable is considered as
   maybe aliased if it has its address taken by the local TU
   or possibly by another TU and might be modified through a pointer.  */

static inline bool
may_be_aliased (const_tree var)
{
  return (TREE_CODE (var) != CONST_DECL
          && !((TREE_STATIC (var) || TREE_PUBLIC (var) || DECL_EXTERNAL (var))
               && TREE_READONLY (var)
               && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (var)))
          && (TREE_PUBLIC (var)
              || DECL_EXTERNAL (var)
              || TREE_ADDRESSABLE (var)));
}

/* PHI nodes should contain only ssa_names and invariants.  A test
   for ssa_name is definitely simpler; don't let invalid contents
   slip in in the meantime.  */

static inline bool
phi_ssa_name_p (const_tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return true;
  gcc_checking_assert (is_gimple_min_invariant (t));
  return false;
}

/* Returns the loop of the statement STMT.  */

static inline struct loop *
loop_containing_stmt (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  if (!bb)
    return NULL;

  return bb->loop_father;
}

/* ----------------------------------------------------------------------- */

/* The following set of routines is used to iterate over various types of
   SSA operands.  */

/* Return true if PTR is finished iterating.  */
static inline bool
op_iter_done (const ssa_op_iter *ptr)
{
  return ptr->done;
}

/* Get the next iterator use value for PTR.  */
static inline use_operand_p
op_iter_next_use (ssa_op_iter *ptr)
{
  use_operand_p use_p;
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_use);
  if (ptr->uses)
    {
      use_p = USE_OP_PTR (ptr->uses);
      ptr->uses = ptr->uses->next;
      return use_p;
    }
  if (ptr->i < ptr->numops)
    {
      return PHI_ARG_DEF_PTR (ptr->stmt, (ptr->i)++);
    }
  ptr->done = true;
  return NULL_USE_OPERAND_P;
}

/* Get the next iterator def value for PTR.  */
static inline def_operand_p
op_iter_next_def (ssa_op_iter *ptr)
{
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_def);
  if (ptr->flags & SSA_OP_VDEF)
    {
      tree *p;
      ptr->flags &= ~SSA_OP_VDEF;
      p = gimple_vdef_ptr (ptr->stmt);
      if (p && *p)
        return p;
    }
  if (ptr->flags & SSA_OP_DEF)
    {
      while (ptr->i < ptr->numops)
        {
          tree *val = gimple_op_ptr (ptr->stmt, ptr->i);
          ptr->i++;
          if (*val)
            {
              if (TREE_CODE (*val) == TREE_LIST)
                val = &TREE_VALUE (*val);
              if (TREE_CODE (*val) == SSA_NAME
                  || is_gimple_reg (*val))
                return val;
            }
        }
      ptr->flags &= ~SSA_OP_DEF;
    }

  ptr->done = true;
  return NULL_DEF_OPERAND_P;
}

/* Get the next iterator tree value for PTR.  */
static inline tree
op_iter_next_tree (ssa_op_iter *ptr)
{
  tree val;
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_tree);
  if (ptr->uses)
    {
      val = USE_OP (ptr->uses);
      ptr->uses = ptr->uses->next;
      return val;
    }
  if (ptr->flags & SSA_OP_VDEF)
    {
      ptr->flags &= ~SSA_OP_VDEF;
      if ((val = gimple_vdef (ptr->stmt)))
        return val;
    }
  if (ptr->flags & SSA_OP_DEF)
    {
      while (ptr->i < ptr->numops)
        {
          val = gimple_op (ptr->stmt, ptr->i);
          ptr->i++;
          if (val)
            {
              if (TREE_CODE (val) == TREE_LIST)
                val = TREE_VALUE (val);
              if (TREE_CODE (val) == SSA_NAME
                  || is_gimple_reg (val))
                return val;
            }
        }
      ptr->flags &= ~SSA_OP_DEF;
    }

  ptr->done = true;
  return NULL_TREE;
}

/* This function clears the iterator PTR, and marks it done.  This is normally
   used to prevent warnings in the compiler about possibly uninitialized
   components.  */

static inline void
clear_and_done_ssa_iter (ssa_op_iter *ptr)
{
  ptr->i = 0;
  ptr->numops = 0;
  ptr->uses = NULL;
  ptr->iter_type = ssa_op_iter_none;
  ptr->stmt = NULL;
  ptr->done = true;
  ptr->flags = 0;
}

/* Initialize the iterator PTR to the operands in STMT requested by FLAGS.  */
static inline void
op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
{
  /* PHI nodes require a different iterator initialization path.  We
     do not support iterating over virtual defs or uses without
     iterating over defs or uses at the same time.  */
  gcc_checking_assert (gimple_code (stmt) != GIMPLE_PHI
                       && (!(flags & SSA_OP_VDEF) || (flags & SSA_OP_DEF))
                       && (!(flags & SSA_OP_VUSE) || (flags & SSA_OP_USE)));
  ptr->numops = 0;
  if (flags & (SSA_OP_DEF | SSA_OP_VDEF))
    {
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
        case GIMPLE_CALL:
          ptr->numops = 1;
          break;
        case GIMPLE_ASM:
          ptr->numops = gimple_asm_noutputs (stmt);
          break;
        default:
          ptr->numops = 0;
          flags &= ~(SSA_OP_DEF | SSA_OP_VDEF);
          break;
        }
    }
  ptr->uses = (flags & (SSA_OP_USE|SSA_OP_VUSE)) ? gimple_use_ops (stmt) : NULL;
  if (!(flags & SSA_OP_VUSE)
      && ptr->uses
      && gimple_vuse (stmt) != NULL_TREE)
    ptr->uses = ptr->uses->next;
  ptr->done = false;
  ptr->i = 0;

  ptr->stmt = stmt;
  ptr->flags = flags;
}

/* Initialize iterator PTR to the use operands in STMT based on FLAGS.  Return
   the first use.  */
static inline use_operand_p
op_iter_init_use (ssa_op_iter *ptr, gimple stmt, int flags)
{
  gcc_checking_assert ((flags & SSA_OP_ALL_DEFS) == 0
                       && (flags & SSA_OP_USE));
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_use;
  return op_iter_next_use (ptr);
}

/* Initialize iterator PTR to the def operands in STMT based on FLAGS.  Return
   the first def.  */
static inline def_operand_p
op_iter_init_def (ssa_op_iter *ptr, gimple stmt, int flags)
{
  gcc_checking_assert ((flags & SSA_OP_ALL_USES) == 0
                       && (flags & SSA_OP_DEF));
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_def;
  return op_iter_next_def (ptr);
}

/* Initialize iterator PTR to the operands in STMT based on FLAGS.  Return
   the first operand as a tree.  */
static inline tree
op_iter_init_tree (ssa_op_iter *ptr, gimple stmt, int flags)
{
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_tree;
  return op_iter_next_tree (ptr);
}

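/* Usage sketch: the three initializers above back the operand iteration
   macros (FOR_EACH_SSA_USE_OPERAND, FOR_EACH_SSA_DEF_OPERAND,
   FOR_EACH_SSA_TREE_OPERAND).  A hypothetical walk over the SSA uses of
   a statement STMT:

     ssa_op_iter iter;
     tree op;
     FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
       mark_used (op);

   `mark_used' is a placeholder for illustration.  */
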
/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   return NULL.  */
static inline tree
single_ssa_tree_operand (gimple stmt, int flags)
{
  tree var;
  ssa_op_iter iter;

  var = op_iter_init_tree (&iter, stmt, flags);
  if (op_iter_done (&iter))
    return NULL_TREE;
  op_iter_next_tree (&iter);
  if (op_iter_done (&iter))
    return var;
  return NULL_TREE;
}

/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   return NULL.  */
static inline use_operand_p
single_ssa_use_operand (gimple stmt, int flags)
{
  use_operand_p var;
  ssa_op_iter iter;

  var = op_iter_init_use (&iter, stmt, flags);
  if (op_iter_done (&iter))
    return NULL_USE_OPERAND_P;
  op_iter_next_use (&iter);
  if (op_iter_done (&iter))
    return var;
  return NULL_USE_OPERAND_P;
}

/* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
   return NULL.  */
static inline def_operand_p
single_ssa_def_operand (gimple stmt, int flags)
{
  def_operand_p var;
  ssa_op_iter iter;

  var = op_iter_init_def (&iter, stmt, flags);
  if (op_iter_done (&iter))
    return NULL_DEF_OPERAND_P;
  op_iter_next_def (&iter);
  if (op_iter_done (&iter))
    return var;
  return NULL_DEF_OPERAND_P;
}

/* Return true if there are zero operands in STMT matching the type
   given in FLAGS.  */
static inline bool
zero_ssa_operands (gimple stmt, int flags)
{
  ssa_op_iter iter;

  op_iter_init_tree (&iter, stmt, flags);
  return op_iter_done (&iter);
}

/* Return the number of operands matching FLAGS in STMT.  */
static inline int
num_ssa_operands (gimple stmt, int flags)
{
  ssa_op_iter iter;
  tree t;
  int num = 0;

  gcc_checking_assert (gimple_code (stmt) != GIMPLE_PHI);
  FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, flags)
    num++;
  return num;
}

static inline use_operand_p
op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags);

/* Delink all immediate_use information for STMT.  */
static inline void
delink_stmt_imm_use (gimple stmt)
{
  ssa_op_iter iter;
  use_operand_p use_p;

  if (ssa_operands_active (cfun))
    FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_ALL_USES)
      delink_imm_use (use_p);
}

/* If there is a single DEF in the PHI node which matches FLAGS, return it.
   Otherwise return NULL_TREE.  */
static inline tree
single_phi_def (gimple stmt, int flags)
{
  tree def = PHI_RESULT (stmt);
  if ((flags & SSA_OP_DEF) && is_gimple_reg (def))
    return def;
  if ((flags & SSA_OP_VIRTUAL_DEFS) && !is_gimple_reg (def))
    return def;
  return NULL_TREE;
}

/* Initialize the iterator PTR for uses matching FLAGS in PHI.  FLAGS should
   be either SSA_OP_USES or SSA_OP_VIRTUAL_USES.  */
static inline use_operand_p
op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags)
{
  tree phi_def = gimple_phi_result (phi);
  int comp;

  clear_and_done_ssa_iter (ptr);
  ptr->done = false;

  gcc_checking_assert ((flags & (SSA_OP_USE | SSA_OP_VIRTUAL_USES)) != 0);

  comp = (is_gimple_reg (phi_def) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

  /* If the PHI node doesn't have the operand type we care about,
     we're done.  */
  if ((flags & comp) == 0)
    {
      ptr->done = true;
      return NULL_USE_OPERAND_P;
    }

  ptr->stmt = phi;
  ptr->numops = gimple_phi_num_args (phi);
  ptr->iter_type = ssa_op_iter_use;
  ptr->flags = flags;
  return op_iter_next_use (ptr);
}

/* Start an iterator for a PHI definition.  */

static inline def_operand_p
op_iter_init_phidef (ssa_op_iter *ptr, gimple phi, int flags)
{
  tree phi_def = PHI_RESULT (phi);
  int comp;

  clear_and_done_ssa_iter (ptr);
  ptr->done = false;

  gcc_checking_assert ((flags & (SSA_OP_DEF | SSA_OP_VIRTUAL_DEFS)) != 0);

  comp = (is_gimple_reg (phi_def) ? SSA_OP_DEF : SSA_OP_VIRTUAL_DEFS);

  /* If the PHI node doesn't have the operand type we care about,
     we're done.  */
  if ((flags & comp) == 0)
    {
      ptr->done = true;
      return NULL_DEF_OPERAND_P;
    }

  ptr->iter_type = ssa_op_iter_def;
  /* The first call to op_iter_next_def will terminate the iterator since
     all the fields are NULL.  Simply return the result here as the first and
     therefore only result.  */
  return PHI_RESULT_PTR (phi);
}

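/* Usage sketch: the two PHI initializers above back the PHI-aware macros
   such as FOR_EACH_PHI_ARG and FOR_EACH_PHI_OR_STMT_USE.  A hypothetical
   scan of the incoming arguments of a PHI node PHI:

     ssa_op_iter iter;
     use_operand_p use_p;
     FOR_EACH_PHI_ARG (use_p, phi, iter, SSA_OP_USE)
       note_use (use_p);

   `note_use' is a placeholder for illustration.  */
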
/* Return true if IMM has reached the end of the immediate use stmt list.  */

static inline bool
end_imm_use_stmt_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == imm->end_p);
}

/* Finish the traversal of an immediate use stmt list IMM by removing the
   placeholder node from the list.  */

static inline void
end_imm_use_stmt_traverse (imm_use_iterator *imm)
{
  delink_imm_use (&(imm->iter_node));
}

/* Immediate use traversal of uses within a stmt requires that all the
   uses on a stmt be sequentially listed.  This routine is used to build up
   this sequential list by adding USE_P to the end of the current list
   currently delimited by HEAD and LAST_P.  The new LAST_P value is
   returned.  */

static inline use_operand_p
move_use_after_head (use_operand_p use_p, use_operand_p head,
                     use_operand_p last_p)
{
  gcc_checking_assert (USE_FROM_PTR (use_p) == USE_FROM_PTR (head));
  /* Skip head when we find it.  */
  if (use_p != head)
    {
      /* If use_p is already linked in after last_p, continue.  */
      if (last_p->next == use_p)
        last_p = use_p;
      else
        {
          /* Delink from current location, and link in at last_p.  */
          delink_imm_use (use_p);
          link_imm_use_to_list (use_p, last_p);
          last_p = use_p;
        }
    }
  return last_p;
}

/* This routine will relink all uses with the same stmt as HEAD into the list
   immediately following HEAD for iterator IMM.  */

static inline void
link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
{
  use_operand_p use_p;
  use_operand_p last_p = head;
  gimple head_stmt = USE_STMT (head);
  tree use = USE_FROM_PTR (head);
  ssa_op_iter op_iter;
  int flag;

  /* Only look at virtual or real uses, depending on the type of HEAD.  */
  flag = (is_gimple_reg (use) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

  if (gimple_code (head_stmt) == GIMPLE_PHI)
    {
      FOR_EACH_PHI_ARG (use_p, head_stmt, op_iter, flag)
        if (USE_FROM_PTR (use_p) == use)
          last_p = move_use_after_head (use_p, head, last_p);
    }
  else
    {
      if (flag == SSA_OP_USE)
        {
          FOR_EACH_SSA_USE_OPERAND (use_p, head_stmt, op_iter, flag)
            if (USE_FROM_PTR (use_p) == use)
              last_p = move_use_after_head (use_p, head, last_p);
        }
      else if ((use_p = gimple_vuse_op (head_stmt)) != NULL_USE_OPERAND_P)
        {
          if (USE_FROM_PTR (use_p) == use)
            last_p = move_use_after_head (use_p, head, last_p);
        }
    }
  /* Link iter node in after last_p.  */
  if (imm->iter_node.prev != NULL)
    delink_imm_use (&imm->iter_node);
  link_imm_use_to_list (&(imm->iter_node), last_p);
  imm->iter_node.use = head->use;
}

/* Initialize IMM to traverse over uses of VAR.  Return the first statement.  */
static inline gimple
first_imm_use_stmt (imm_use_iterator *imm, tree var)
{
  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
  imm->next_imm_name = NULL_USE_OPERAND_P;

  /* iter_node is used as a marker within the immediate use list to indicate
     where the end of the current stmt's uses are.  Initialize it to NULL
     stmt and use, which indicates a marker node.  */
  imm->iter_node.prev = NULL_USE_OPERAND_P;
  imm->iter_node.next = NULL_USE_OPERAND_P;
  imm->iter_node.loc.stmt = NULL;
  imm->iter_node.use = NULL;

  if (end_imm_use_stmt_p (imm))
    return NULL;

  link_use_stmts_after (imm->imm_use, imm);

  return USE_STMT (imm->imm_use);
}

/* Bump IMM to the next stmt which has a use of VAR.  */

static inline gimple
next_imm_use_stmt (imm_use_iterator *imm)
{
  imm->imm_use = imm->iter_node.next;
  if (end_imm_use_stmt_p (imm))
    {
      if (imm->iter_node.prev != NULL)
        delink_imm_use (&imm->iter_node);
      return NULL;
    }

  link_use_stmts_after (imm->imm_use, imm);
  return USE_STMT (imm->imm_use);
}

/* This routine will return the first use on the stmt IMM currently refers
   to.  */

static inline use_operand_p
first_imm_use_on_stmt (imm_use_iterator *imm)
{
  imm->next_imm_name = imm->imm_use->next;
  return imm->imm_use;
}

/* Return TRUE if the last use on the stmt IMM refers to has been visited.  */

static inline bool
end_imm_use_on_stmt_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == &(imm->iter_node));
}

/* Bump to the next use on the stmt IMM refers to, return NULL if done.  */

static inline use_operand_p
next_imm_use_on_stmt (imm_use_iterator *imm)
{
  imm->imm_use = imm->next_imm_name;
  if (end_imm_use_on_stmt_p (imm))
    return NULL_USE_OPERAND_P;
  else
    {
      imm->next_imm_name = imm->imm_use->next;
      return imm->imm_use;
    }
}

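/* Usage sketch: the stmt-grouped traversal above is normally driven
   through FOR_EACH_IMM_USE_STMT and FOR_EACH_IMM_USE_ON_STMT.  A
   hypothetical replacement of every use of NAME by NEWVAL:

     imm_use_iterator iter;
     gimple use_stmt;
     use_operand_p use_p;
     FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
       {
         FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
           SET_USE (use_p, newval);
         update_stmt (use_stmt);
       }

   Unlike the FAST variant, this iterator tolerates modification of the
   uses it has already handed out.  `name' and `newval' are placeholders
   for illustration.  */
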
/* Return true if VAR cannot be modified by the program.  */

static inline bool
unmodifiable_var_p (const_tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);

  return TREE_READONLY (var) && (TREE_STATIC (var) || DECL_EXTERNAL (var));
}

/* Return true if REF, a handled component reference, has an ARRAY_REF
   somewhere in it.  */

static inline bool
ref_contains_array_ref (const_tree ref)
{
  gcc_checking_assert (handled_component_p (ref));

  do {
    if (TREE_CODE (ref) == ARRAY_REF)
      return true;
    ref = TREE_OPERAND (ref, 0);
  } while (handled_component_p (ref));

  return false;
}

/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
        return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}

/* Return true if the two ranges [POS1, SIZE1] and [POS2, SIZE2]
   overlap.  SIZE1 and/or SIZE2 can be (unsigned)-1 in which case the
   range is open-ended.  Otherwise return false.  */

static inline bool
ranges_overlap_p (unsigned HOST_WIDE_INT pos1,
                  unsigned HOST_WIDE_INT size1,
                  unsigned HOST_WIDE_INT pos2,
                  unsigned HOST_WIDE_INT size2)
{
  if (pos1 >= pos2
      && (size2 == (unsigned HOST_WIDE_INT)-1
          || pos1 < (pos2 + size2)))
    return true;
  if (pos2 >= pos1
      && (size1 == (unsigned HOST_WIDE_INT)-1
          || pos2 < (pos1 + size1)))
    return true;

  return false;
}

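/* For example, the range starting at 4 of size 4 covers bytes 4..7 and
   the range starting at 6 of size 8 covers bytes 6..13, so
   ranges_overlap_p (4, 4, 6, 8) is true, while ranges_overlap_p (4, 2, 6, 8)
   is false because byte 6 lies just past the first range.  The open-ended
   query ranges_overlap_p (0, -1, 100, 1) is true as well.  */
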
/* Accessor to tree-ssa-operands.c caches.  */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}

/* Given an edge_var_map V, return the PHI arg definition.  */

static inline tree
redirect_edge_var_map_def (edge_var_map *v)
{
  return v->def;
}

/* Given an edge_var_map V, return the PHI result.  */

static inline tree
redirect_edge_var_map_result (edge_var_map *v)
{
  return v->result;
}

/* Given an edge_var_map V, return the PHI arg location.  */

static inline source_location
redirect_edge_var_map_location (edge_var_map *v)
{
  return v->locus;
}

/* Return an SSA_NAME node for variable VAR defined in statement STMT
   in function cfun.  */

static inline tree
make_ssa_name (tree var, gimple stmt)
{
  return make_ssa_name_fn (cfun, var, stmt);
}

/* Return an SSA_NAME node using the template SSA name NAME defined in
   statement STMT in function cfun.  */

static inline tree
copy_ssa_name (tree var, gimple stmt)
{
  return copy_ssa_name_fn (cfun, var, stmt);
}

/* Creates a duplicate of an SSA name NAME to be defined by statement STMT
   in function cfun.  */

static inline tree
duplicate_ssa_name (tree var, gimple stmt)
{
  return duplicate_ssa_name_fn (cfun, var, stmt);
}

/* Return an anonymous SSA_NAME node for type TYPE defined in statement STMT
   in function cfun.  Arrange so that it uses NAME in dumps.  */

static inline tree
make_temp_ssa_name (tree type, gimple stmt, const char *name)
{
  tree ssa_name;
  gcc_checking_assert (TYPE_P (type));
  ssa_name = make_ssa_name_fn (cfun, type, stmt);
  SET_SSA_NAME_VAR_OR_IDENTIFIER (ssa_name, get_identifier (name));
  return ssa_name;
}

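/* Usage sketch (hypothetical caller): a pass materializing a temporary
   of type TYPE named "tmp" in dumps might do:

     tree tem = make_temp_ssa_name (type, NULL, "tmp");
     gimple new_stmt = gimple_build_assign (tem, rhs);

   `rhs' is a placeholder; creating the name with a NULL defining
   statement and letting the gimple builder set it is a common pattern.  */
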
/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.
   VALUEIZE if non-NULL is used to valueize SSA names.  It should return
   its argument or a constant if the argument is known to be constant.  */
/* ??? This is a static inline here to avoid the overhead of the indirect calls
   to VALUEIZE.  But is this overhead really that significant?  And should we
   perhaps just rely on WHOPR to specialize the function?  */

static inline tree
get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset,
                                 tree (*valueize) (tree))
{
  HOST_WIDE_INT byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case BIT_FIELD_REF:
          {
            HOST_WIDE_INT this_off = TREE_INT_CST_LOW (TREE_OPERAND (exp, 2));
            if (this_off % BITS_PER_UNIT)
              return NULL_TREE;
            byte_offset += this_off / BITS_PER_UNIT;
          }
          break;

        case COMPONENT_REF:
          {
            tree field = TREE_OPERAND (exp, 1);
            tree this_offset = component_ref_field_offset (exp);
            HOST_WIDE_INT hthis_offset;

            if (!this_offset
                || TREE_CODE (this_offset) != INTEGER_CST
                || (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
                    % BITS_PER_UNIT))
              return NULL_TREE;

            hthis_offset = TREE_INT_CST_LOW (this_offset);
            hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
                             / BITS_PER_UNIT);
            byte_offset += hthis_offset;
          }
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree index = TREE_OPERAND (exp, 1);
            tree low_bound, unit_size;

            if (valueize
                && TREE_CODE (index) == SSA_NAME)
              index = (*valueize) (index);

            /* If the resulting bit-offset is constant, track it.  */
            if (TREE_CODE (index) == INTEGER_CST
                && (low_bound = array_ref_low_bound (exp),
                    TREE_CODE (low_bound) == INTEGER_CST)
                && (unit_size = array_ref_element_size (exp),
                    TREE_CODE (unit_size) == INTEGER_CST))
              {
                HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);

                hindex -= TREE_INT_CST_LOW (low_bound);
                hindex *= TREE_INT_CST_LOW (unit_size);
                byte_offset += hindex;
              }
            else
              return NULL_TREE;
          }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case MEM_REF:
          {
            tree base = TREE_OPERAND (exp, 0);
            if (valueize
                && TREE_CODE (base) == SSA_NAME)
              base = (*valueize) (base);

            /* Hand back the decl for MEM[&decl, off].  */
            if (TREE_CODE (base) == ADDR_EXPR)
              {
                if (!integer_zerop (TREE_OPERAND (exp, 1)))
                  {
                    double_int off = mem_ref_offset (exp);
                    gcc_assert (off.high == -1 || off.high == 0);
                    byte_offset += off.to_shwi ();
                  }
                exp = TREE_OPERAND (base, 0);
              }
            goto done;
          }

        case TARGET_MEM_REF:
          {
            tree base = TREE_OPERAND (exp, 0);
            if (valueize
                && TREE_CODE (base) == SSA_NAME)
              base = (*valueize) (base);

            /* Hand back the decl for MEM[&decl, off].  */
            if (TREE_CODE (base) == ADDR_EXPR)
              {
                if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
                  return NULL_TREE;
                if (!integer_zerop (TMR_OFFSET (exp)))
                  {
                    double_int off = mem_ref_offset (exp);
                    gcc_assert (off.high == -1 || off.high == 0);
                    byte_offset += off.to_shwi ();
                  }
                exp = TREE_OPERAND (base, 0);
              }
            goto done;
          }

        default:
          goto done;
        }

      exp = TREE_OPERAND (exp, 0);
    }
done:

  *poffset = byte_offset;
  return exp;
}

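/* Usage sketch (hypothetical caller): callers that do not need to
   valueize SSA names can pass a NULL VALUEIZE callback:

     HOST_WIDE_INT off;
     tree base = get_addr_base_and_unit_offset_1 (ref, &off, NULL);

   On success, BASE plus OFF bytes describes the start of the access;
   `ref' is a placeholder for illustration.  */
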
#endif /* _TREE_FLOW_INLINE_H */