/* Inline functions for tree-flow.h
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#ifndef _TREE_FLOW_INLINE_H
#define _TREE_FLOW_INLINE_H 1

/* Inline functions for manipulating various data structures defined in
   tree-flow.h.  See tree-flow.h for documentation.  */
/* Return true when gimple SSA form was built.
   gimple_in_ssa_p is queried by the gimplifier in various early stages
   before the SSA infrastructure is initialized, so check that the data
   structures are present before dereferencing them.  */

static inline bool
gimple_in_ssa_p (const struct function *fun)
{
  return fun && fun->gimple_df && fun->gimple_df->in_ssa_p;
}
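
/* Example: code that can run both during gimplification and after the
   SSA web exists typically guards SSA-only bookkeeping on this
   predicate.  A sketch of the pattern (used e.g. in omp-low.c):

     if (gimple_in_ssa_p (cfun))
       update_ssa (TODO_update_ssa);
*/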

/* Artificial variable used for the virtual operand FUD chain.  */

static inline tree
gimple_vop (const struct function *fun)
{
  gcc_checking_assert (fun && fun->gimple_df);
  return fun->gimple_df->vop;
}

/* Initialize the hashtable iterator HTI to point to hashtable TABLE.  */

static inline void *
first_htab_element (htab_iterator *hti, htab_t table)
{
  hti->htab = table;
  hti->slot = table->entries;
  hti->limit = hti->slot + htab_size (table);
  /* Skip leading empty/deleted slots to land on the first live entry.  */
  do
    {
      PTR x = *(hti->slot);
      if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
        break;
    } while (++(hti->slot) < hti->limit);

  if (hti->slot < hti->limit)
    return *(hti->slot);
  return NULL;
}

/* Return true if the hashtable iterator pointed to by HTI has reached
   the end of the table, false otherwise.  */

static inline bool
end_htab_p (const htab_iterator *hti)
{
  if (hti->slot >= hti->limit)
    return true;
  return false;
}

/* Advance the hashtable iterator pointed to by HTI to the next element of the
   hashtable.  */

static inline void *
next_htab_element (htab_iterator *hti)
{
  while (++(hti->slot) < hti->limit)
    {
      PTR x = *(hti->slot);
      if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
        return x;
    }
  return NULL;
}
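
/* These three functions implement a simple iteration protocol:
   first_htab_element positions the iterator on the first live slot,
   end_htab_p tests for exhaustion, and next_htab_element skips
   empty/deleted entries.  An open-coded walk looks like this sketch,
   where "table" is some htab_t and visit () is a hypothetical callback:

     htab_iterator hti;
     PTR elt;
     for (elt = first_htab_element (&hti, table);
          !end_htab_p (&hti);
          elt = next_htab_element (&hti))
       visit (elt);

   tree-flow.h wraps this pattern in the FOR_EACH_HTAB_ELEMENT macro.  */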

/* Get the number of the next statement uid to be allocated.  */

static inline unsigned int
gimple_stmt_max_uid (struct function *fn)
{
  return fn->last_stmt_uid;
}

/* Set the number of the next statement uid to be allocated.  */

static inline void
set_gimple_stmt_max_uid (struct function *fn, unsigned int maxid)
{
  fn->last_stmt_uid = maxid;
}

/* Allocate the next statement uid and return it.  */

static inline unsigned int
inc_gimple_stmt_max_uid (struct function *fn)
{
  return fn->last_stmt_uid++;
}
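
/* Example: passes that want dense, ordered statement uids renumber all
   statements up front, in the style of renumber_gimple_stmt_uids from
   tree-dfa.c (a sketch; assumes the usual cfun and CFG iterators):

     basic_block bb;
     set_gimple_stmt_max_uid (cfun, 0);
     FOR_EACH_BB (bb)
       {
         gimple_stmt_iterator gsi;
         for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
           gimple_set_uid (gsi_stmt (gsi), inc_gimple_stmt_max_uid (cfun));
       }
*/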

/* Return the line number for STMT, or return -1 if we have no line
   number information for it.  */

static inline int
get_lineno (const_gimple stmt)
{
  location_t loc;

  if (!stmt)
    return -1;

  loc = gimple_location (stmt);
  if (loc == UNKNOWN_LOCATION)
    return -1;

  return LOCATION_LINE (loc);
}

/* Return true if T (assumed to be a DECL) is a global variable.
   A variable is considered global if its storage is not automatic.  */

static inline bool
is_global_var (const_tree t)
{
  return (TREE_STATIC (t) || DECL_EXTERNAL (t));
}

/* Return true if VAR may be aliased.  A variable is considered
   maybe aliased if it has its address taken by the local TU
   or possibly by another TU and might be modified through a pointer.  */

static inline bool
may_be_aliased (const_tree var)
{
  return (TREE_CODE (var) != CONST_DECL
          && !((TREE_STATIC (var) || TREE_PUBLIC (var) || DECL_EXTERNAL (var))
               && TREE_READONLY (var)
               && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (var)))
          && (TREE_PUBLIC (var)
              || DECL_EXTERNAL (var)
              || TREE_ADDRESSABLE (var)));
}
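
/* Illustrative cases (hypothetical decls): a TREE_PUBLIC global
   "int g;" or a local whose address escapes ("int l; int *p = &l;",
   making l TREE_ADDRESSABLE) is maybe-aliased, while a readonly static
   such as "static const int k = 1;" cannot be modified through a
   pointer and is excluded.  Note this is a conservative syntactic
   test, not points-to analysis.  */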

/* Returns the loop of the statement STMT.  */

static inline struct loop *
loop_containing_stmt (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  if (!bb)
    return NULL;

  return bb->loop_father;
}

/* Return true if VAR cannot be modified by the program.  */

static inline bool
unmodifiable_var_p (const_tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);

  return TREE_READONLY (var) && (TREE_STATIC (var) || DECL_EXTERNAL (var));
}

/* Return true if REF, a handled component reference, has an ARRAY_REF
   somewhere in it.  */

static inline bool
ref_contains_array_ref (const_tree ref)
{
  gcc_checking_assert (handled_component_p (ref));

  do {
    if (TREE_CODE (ref) == ARRAY_REF)
      return true;
    ref = TREE_OPERAND (ref, 0);
  } while (handled_component_p (ref));

  return false;
}

/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
        return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}

/* Return true if the two ranges [POS1, POS1 + SIZE1) and
   [POS2, POS2 + SIZE2) overlap.  SIZE1 and/or SIZE2 can be
   (unsigned)-1, in which case the range is open-ended.
   Otherwise return false.  */

static inline bool
ranges_overlap_p (unsigned HOST_WIDE_INT pos1,
                  unsigned HOST_WIDE_INT size1,
                  unsigned HOST_WIDE_INT pos2,
                  unsigned HOST_WIDE_INT size2)
{
  if (pos1 >= pos2
      && (size2 == (unsigned HOST_WIDE_INT)-1
          || pos1 < (pos2 + size2)))
    return true;
  if (pos2 >= pos1
      && (size1 == (unsigned HOST_WIDE_INT)-1
          || pos2 < (pos1 + size1)))
    return true;

  return false;
}
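
/* Worked example: ranges_overlap_p (0, 4, 2, 4) is true because the
   byte ranges [0, 4) and [2, 6) share bytes 2 and 3, while
   ranges_overlap_p (0, 4, 4, 4) is false because [0, 4) and [4, 8)
   only touch.  With size2 == (unsigned HOST_WIDE_INT)-1 the second
   range extends without bound, so any pos1 >= pos2 overlaps it.  */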

/* Accessor to tree-ssa-operands.c caches.  */

static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}

/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.
   VALUEIZE if non-NULL is used to valueize SSA names.  It should return
   its argument or a constant if the argument is known to be constant.  */
/* ??? This is a static inline here to avoid the overhead of the indirect calls
   to VALUEIZE.  But is this overhead really that significant?  And should we
   perhaps just rely on WHOPR to specialize the function?  */

static inline tree
get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset,
                                 tree (*valueize) (tree))
{
  HOST_WIDE_INT byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case BIT_FIELD_REF:
          {
            HOST_WIDE_INT this_off = TREE_INT_CST_LOW (TREE_OPERAND (exp, 2));
            if (this_off % BITS_PER_UNIT)
              return NULL_TREE;
            byte_offset += this_off / BITS_PER_UNIT;
          }
          break;

        case COMPONENT_REF:
          {
            tree field = TREE_OPERAND (exp, 1);
            tree this_offset = component_ref_field_offset (exp);
            HOST_WIDE_INT hthis_offset;

            if (!this_offset
                || TREE_CODE (this_offset) != INTEGER_CST
                || (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
                    % BITS_PER_UNIT))
              return NULL_TREE;

            hthis_offset = TREE_INT_CST_LOW (this_offset);
            hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
                             / BITS_PER_UNIT);
            byte_offset += hthis_offset;
          }
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree index = TREE_OPERAND (exp, 1);
            tree low_bound, unit_size;

            if (valueize
                && TREE_CODE (index) == SSA_NAME)
              index = (*valueize) (index);

            /* If the resulting bit-offset is constant, track it.  */
            if (TREE_CODE (index) == INTEGER_CST
                && (low_bound = array_ref_low_bound (exp),
                    TREE_CODE (low_bound) == INTEGER_CST)
                && (unit_size = array_ref_element_size (exp),
                    TREE_CODE (unit_size) == INTEGER_CST))
              {
                HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);

                hindex -= TREE_INT_CST_LOW (low_bound);
                hindex *= TREE_INT_CST_LOW (unit_size);
                byte_offset += hindex;
              }
            else
              return NULL_TREE;
          }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case MEM_REF:
          {
            tree base = TREE_OPERAND (exp, 0);
            if (valueize
                && TREE_CODE (base) == SSA_NAME)
              base = (*valueize) (base);

            /* Hand back the decl for MEM[&decl, off].  */
            if (TREE_CODE (base) == ADDR_EXPR)
              {
                if (!integer_zerop (TREE_OPERAND (exp, 1)))
                  {
                    double_int off = mem_ref_offset (exp);
                    gcc_assert (off.high == -1 || off.high == 0);
                    byte_offset += off.to_shwi ();
                  }
                exp = TREE_OPERAND (base, 0);
              }
            goto done;
          }

        case TARGET_MEM_REF:
          {
            tree base = TREE_OPERAND (exp, 0);
            if (valueize
                && TREE_CODE (base) == SSA_NAME)
              base = (*valueize) (base);

            /* Hand back the decl for MEM[&decl, off].  */
            if (TREE_CODE (base) == ADDR_EXPR)
              {
                if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
                  return NULL_TREE;
                if (!integer_zerop (TMR_OFFSET (exp)))
                  {
                    double_int off = mem_ref_offset (exp);
                    gcc_assert (off.high == -1 || off.high == 0);
                    byte_offset += off.to_shwi ();
                  }
                exp = TREE_OPERAND (base, 0);
              }
            goto done;
          }

        default:
          goto done;
        }

      exp = TREE_OPERAND (exp, 0);
    }
done:

  *poffset = byte_offset;
  return exp;
}
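
/* Callers that need no SSA-name valueization pass NULL for VALUEIZE,
   as the out-of-line wrapper in tree-dfa.c does:

     tree
     get_addr_base_and_unit_offset (tree exp, HOST_WIDE_INT *poffset)
     {
       return get_addr_base_and_unit_offset_1 (exp, poffset, NULL);
     }

   Valueizing callers (e.g. the SCCVN code) instead supply a callback
   that maps an SSA name to a known constant, or returns its argument
   unchanged.  */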

#endif /* _TREE_FLOW_INLINE_H */