/* Inline functions for tree-flow.h
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#ifndef _TREE_FLOW_INLINE_H
#define _TREE_FLOW_INLINE_H 1

/* Inline functions for manipulating various data structures defined in
   tree-flow.h.  See tree-flow.h for documentation.  */
/* Return true when gimple SSA form was built.
   gimple_in_ssa_p is queried by the gimplifier in various early stages
   before the SSA infrastructure is initialized, so check that the data
   structures are present before dereferencing them.  */
static inline bool
gimple_in_ssa_p (const struct function *fun)
{
  return fun && fun->gimple_df && fun->gimple_df->in_ssa_p;
}
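
/* Usage sketch (illustrative, not part of the original file): passes
   typically guard SSA-only work on the current function:

     if (gimple_in_ssa_p (cfun))
       ...

   where cfun is the global pointer to the function being compiled.  */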
/* Artificial variable used for the virtual operand FUD chain.  */
static inline tree
gimple_vop (const struct function *fun)
{
  gcc_checking_assert (fun && fun->gimple_df);
  return fun->gimple_df->vop;
}
/* Initialize the hashtable iterator HTI to point to hashtable TABLE
   and return the first non-empty/deleted element, or NULL if the table
   contains none.  */

static inline void *
first_htab_element (htab_iterator *hti, htab_t table)
{
  hti->htab = table;
  hti->slot = table->entries;
  hti->limit = hti->slot + htab_size (table);
  do
    {
      PTR x = *(hti->slot);
      if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
        break;
    } while (++(hti->slot) < hti->limit);

  if (hti->slot < hti->limit)
    return *(hti->slot);
  return NULL;
}
/* Return true if the hashtable iterator HTI has reached the end of the
   table.  */

static inline bool
end_htab_p (const htab_iterator *hti)
{
  if (hti->slot >= hti->limit)
    return true;
  return false;
}
/* Advance the hashtable iterator pointed to by HTI to the next
   non-empty/deleted element of the hashtable and return it, or NULL if
   there is none.  */

static inline void *
next_htab_element (htab_iterator *hti)
{
  while (++(hti->slot) < hti->limit)
    {
      PTR x = *(hti->slot);
      if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
        return x;
    }
  return NULL;
}
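
/* Usage sketch (illustrative, not part of the original file): the three
   helpers above combine into a scan over every live element of a table:

     htab_iterator hti;
     void *elt;

     for (elt = first_htab_element (&hti, table);
          !end_htab_p (&hti);
          elt = next_htab_element (&hti))
       process (elt);

   `table' and `process' are hypothetical placeholders.  */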
/* Get the number of the next statement uid to be allocated.  */
static inline unsigned int
gimple_stmt_max_uid (struct function *fn)
{
  return fn->last_stmt_uid;
}

/* Set the number of the next statement uid to be allocated.  */
static inline void
set_gimple_stmt_max_uid (struct function *fn, unsigned int maxid)
{
  fn->last_stmt_uid = maxid;
}

/* Allocate the next statement uid: return it and advance the
   counter.  */
static inline unsigned int
inc_gimple_stmt_max_uid (struct function *fn)
{
  return fn->last_stmt_uid++;
}
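
/* Usage sketch (illustrative, not part of the original file): a pass
   that creates a statement can stamp it with a fresh uid:

     gimple_set_uid (new_stmt, inc_gimple_stmt_max_uid (cfun));

   `new_stmt' is a hypothetical placeholder; gimple_set_uid comes from
   gimple.h.  */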
/* Return the line number for STMT, or -1 if we have no line
   number information for it.  */
static inline int
get_lineno (const_gimple stmt)
{
  location_t loc;

  if (!stmt)
    return -1;

  loc = gimple_location (stmt);
  if (loc == UNKNOWN_LOCATION)
    return -1;

  return LOCATION_LINE (loc);
}
/* Return true if T (assumed to be a DECL) is a global variable.
   A variable is considered global if its storage is not automatic.  */

static inline bool
is_global_var (const_tree t)
{
  return (TREE_STATIC (t) || DECL_EXTERNAL (t));
}
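
/* For example (illustrative, not part of the original file): given

     int g;
     void f (void) { int l; static int s; }

   is_global_var is true for `g' and for `s' (a function-local `static'
   still has non-automatic storage), but false for the automatic `l'.  */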
/* Return true if VAR may be aliased.  A variable is considered as
   maybe aliased if it has its address taken by the local TU
   or possibly by another TU and might be modified through a pointer.  */

static inline bool
may_be_aliased (const_tree var)
{
  return (TREE_CODE (var) != CONST_DECL
          && !((TREE_STATIC (var) || TREE_PUBLIC (var) || DECL_EXTERNAL (var))
               && TREE_READONLY (var)
               && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (var)))
          && (TREE_PUBLIC (var)
              || DECL_EXTERNAL (var)
              || TREE_ADDRESSABLE (var)));
}
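
/* Illustrative cases (not part of the original file):

     static const int tbl[4];    (read-only static data: not aliased)
     int g;                      (TREE_PUBLIC: may be aliased)
     void f (void) { int l; int *p = &l; }
                                 (`l' is TREE_ADDRESSABLE: may be aliased)  */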
/* Return the loop containing the statement STMT, or NULL if the
   statement is not in the CFG.  */

static inline struct loop *
loop_containing_stmt (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  if (!bb)
    return NULL;

  return bb->loop_father;
}
/* Return true if VAR cannot be modified by the program.  */

static inline bool
unmodifiable_var_p (const_tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);

  return TREE_READONLY (var) && (TREE_STATIC (var) || DECL_EXTERNAL (var));
}
/* Return true if REF, a handled component reference, has an ARRAY_REF
   somewhere in it.  */

static inline bool
ref_contains_array_ref (const_tree ref)
{
  gcc_checking_assert (handled_component_p (ref));

  do {
    if (TREE_CODE (ref) == ARRAY_REF)
      return true;
    ref = TREE_OPERAND (ref, 0);
  } while (handled_component_p (ref));

  return false;
}
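
/* For example (illustrative): for `a[i].f', a COMPONENT_REF wrapping an
   ARRAY_REF, this returns true; for the plain field access `s.f' it
   returns false.  */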
/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static inline bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
        return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
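
/* For example (illustrative): a type-punning access that reads a float
   `f' as an integer, VIEW_CONVERT_EXPR<int>(f), makes this predicate
   true for any reference built on top of it.  */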
/* Return true if the two ranges [POS1, POS1 + SIZE1) and
   [POS2, POS2 + SIZE2) overlap, otherwise return false.  SIZE1 and/or
   SIZE2 can be (unsigned)-1, in which case that range is open-ended.  */

static inline bool
ranges_overlap_p (unsigned HOST_WIDE_INT pos1,
                  unsigned HOST_WIDE_INT size1,
                  unsigned HOST_WIDE_INT pos2,
                  unsigned HOST_WIDE_INT size2)
{
  if (pos1 >= pos2
      && (size2 == (unsigned HOST_WIDE_INT)-1
          || pos1 < (pos2 + size2)))
    return true;
  if (pos2 >= pos1
      && (size1 == (unsigned HOST_WIDE_INT)-1
          || pos2 < (pos1 + size1)))
    return true;

  return false;
}
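
/* Worked example (illustrative, not part of the original file):
   ranges_overlap_p (0, 4, 2, 4) is true, since [0, 4) and [2, 6) share
   bytes 2 and 3; ranges_overlap_p (0, 4, 4, 4) is false, since [0, 4)
   and [4, 8) merely touch.  Note that an empty range starting strictly
   inside the other, e.g. ranges_overlap_p (0, 4, 2, 0), still counts
   as overlapping.  */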
/* Accessor to tree-ssa-operands.c caches.  */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}
/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.
   VALUEIZE if non-NULL is used to valueize SSA names.  It should return
   its argument or a constant if the argument is known to be constant.  */
/* ??? This is a static inline here to avoid the overhead of the indirect calls
   to VALUEIZE.  But is this overhead really that significant?  And should we
   perhaps just rely on WHOPR to specialize the function?  */

static inline tree
get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset,
                                 tree (*valueize) (tree))
{
  HOST_WIDE_INT byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case BIT_FIELD_REF:
          {
            HOST_WIDE_INT this_off = TREE_INT_CST_LOW (TREE_OPERAND (exp, 2));
            if (this_off % BITS_PER_UNIT)
              return NULL_TREE;
            byte_offset += this_off / BITS_PER_UNIT;
          }
          break;

        case COMPONENT_REF:
          {
            tree field = TREE_OPERAND (exp, 1);
            tree this_offset = component_ref_field_offset (exp);
            HOST_WIDE_INT hthis_offset;

            if (!this_offset
                || TREE_CODE (this_offset) != INTEGER_CST
                || (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
                    % BITS_PER_UNIT))
              return NULL_TREE;

            hthis_offset = TREE_INT_CST_LOW (this_offset);
            hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
                             / BITS_PER_UNIT);
            byte_offset += hthis_offset;
          }
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree index = TREE_OPERAND (exp, 1);
            tree low_bound, unit_size;

            if (valueize
                && TREE_CODE (index) == SSA_NAME)
              index = (*valueize) (index);

            /* If the resulting bit-offset is constant, track it.  */
            if (TREE_CODE (index) == INTEGER_CST
                && (low_bound = array_ref_low_bound (exp),
                    TREE_CODE (low_bound) == INTEGER_CST)
                && (unit_size = array_ref_element_size (exp),
                    TREE_CODE (unit_size) == INTEGER_CST))
              {
                HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);

                hindex -= TREE_INT_CST_LOW (low_bound);
                hindex *= TREE_INT_CST_LOW (unit_size);
                byte_offset += hindex;
              }
            else
              return NULL_TREE;
          }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case MEM_REF:
          {
            tree base = TREE_OPERAND (exp, 0);
            if (valueize
                && TREE_CODE (base) == SSA_NAME)
              base = (*valueize) (base);

            /* Hand back the decl for MEM[&decl, off].  */
            if (TREE_CODE (base) == ADDR_EXPR)
              {
                if (!integer_zerop (TREE_OPERAND (exp, 1)))
                  {
                    double_int off = mem_ref_offset (exp);
                    gcc_assert (off.high == -1 || off.high == 0);
                    byte_offset += off.to_shwi ();
                  }
                exp = TREE_OPERAND (base, 0);
              }
            goto done;
          }

        case TARGET_MEM_REF:
          {
            tree base = TREE_OPERAND (exp, 0);
            if (valueize
                && TREE_CODE (base) == SSA_NAME)
              base = (*valueize) (base);

            /* Hand back the decl for MEM[&decl, off].  */
            if (TREE_CODE (base) == ADDR_EXPR)
              {
                if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
                  return NULL_TREE;
                if (!integer_zerop (TMR_OFFSET (exp)))
                  {
                    double_int off = mem_ref_offset (exp);
                    gcc_assert (off.high == -1 || off.high == 0);
                    byte_offset += off.to_shwi ();
                  }
                exp = TREE_OPERAND (base, 0);
              }
            goto done;
          }

        default:
          goto done;
        }

      exp = TREE_OPERAND (exp, 0);
    }
done:

  *poffset = byte_offset;
  return exp;
}
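
/* Example (illustrative, not part of the original file): for
   EXP = MEM[&a + 4] this returns the decl `a' with *POFFSET = 4; for
   EXP = a.b[2], with 4-byte array elements and field `b' at byte
   offset 8, it returns `a' with *POFFSET = 8 + 2 * 4 = 16.  Callers
   normally reach this through a wrapper that passes VALUEIZE == NULL
   when no SSA name valueization is wanted.  */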
#endif /* _TREE_FLOW_INLINE_H */