/* Vector API for GNU compiler.
   Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Nathan Sidwell <nathan@codesourcery.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_VEC_H
#define GCC_VEC_H

#include "statistics.h"		/* For MEM_STAT_DECL.  */

/* The macros here implement a set of templated vector types and
   associated interfaces.  These templates are implemented with
   macros, as we're not in C++ land.  The interface functions are
   typesafe and use static inline functions, sometimes backed by
   out-of-line generic functions.  The vectors are designed to
   interoperate with the GTY machinery.
   Because of the different behavior of structure objects, scalar
   objects and of pointers, there are three flavors, one for each of
   these variants.  Both the structure object and pointer variants
   pass pointers to objects around -- in the former case the pointers
   are dereferenced and the objects copied into the vector, and in the
   latter case the pointers themselves are stored into the vector.
   The scalar object variant is suitable for int-like objects, and the
   vector elements are returned by value.
   There are both 'index' and 'iterate' accessors.  The iterator
   returns a boolean iteration condition and updates the iteration
   variable passed by reference.  Because the iterator will be
   inlined, the address-of can be optimized away.

   The vectors are implemented using the trailing array idiom, thus
   they are not resizeable without changing the address of the vector
   object itself.  This means you cannot have variables or fields of
   vector type -- always use a pointer to a vector.  The one exception
   is the final field of a structure, which could be a vector type.
   You will have to use the embedded_size & embedded_init calls to
   create such objects, and they will probably not be resizeable (so
   don't use the 'safe' allocation variants).  The trailing array
   idiom is used (rather than a pointer to an array of data), because,
   if we allow NULL to also represent an empty vector, empty vectors
   occupy minimal space in the structure containing them.
   Each operation that increases the number of active elements is
   available in 'quick' and 'safe' variants.  The former presumes that
   there is sufficient allocated space for the operation to succeed
   (it dies if there is not).  The latter will reallocate the vector,
   if needed.  Reallocation causes an exponential increase in vector
   size.  If you know you will be adding N elements, it would be more
   efficient to use the reserve operation before adding the elements
   with the 'quick' operation.  This will ensure there are at least as
   many spare slots as you ask for; the allocation will increase
   exponentially if there are too few.  If you want to reserve a
   specific number of slots, but do not want the exponential increase
   (for instance, you know this is the last allocation), use the
   reserve_exact operation.  You can also create a vector of a
   specific size from the get go.
   You should prefer the push and pop operations, as they append and
   remove from the end of the vector.  If you need to remove several
   items in one go, use the truncate operation.  The insert and remove
   operations allow you to change elements in the middle of the
   vector.  There are two remove operations, one which preserves the
   element ordering, 'ordered_remove', and one which does not,
   'unordered_remove'.  The latter function copies the end element
   into the removed slot, rather than invoking a memmove operation.
   The 'lower_bound' function will determine where to insert an item
   into the array so that sorted order is maintained.
   When a vector type is defined, first a non-memory managed version
   is created.  You can then define either or both garbage collected
   and heap allocated versions.  The allocation mechanism is specified
   when the type is defined, and is therefore part of the type.  If
   you need both gc'd and heap allocated versions, you still must have
   *exactly* one definition of the common non-memory managed base vector.

   If you need to directly manipulate a vector, then the 'address'
   accessor will return the address of the start of the vector.  Also
   the 'space' predicate will tell you whether there is spare capacity
   in the vector.  You will not normally need to use these two functions.

   Vector types are defined using a DEF_VEC_{O,P,I}(TYPEDEF) macro, to
   get the non-memory allocation version, and then a
   DEF_VEC_ALLOC_{O,P,I}(TYPEDEF,ALLOC) macro to get memory managed
   vectors.  Variables of vector type are declared using a
   VEC(TYPEDEF,ALLOC) macro.  The ALLOC argument specifies the
   allocation strategy, and can be either 'gc' or 'heap' for garbage
   collected and heap allocated respectively.  It can be 'none' to get
   a vector that must be explicitly allocated (for instance as a
   trailing array of another structure).  The characters O, P and I
   indicate whether TYPEDEF is a pointer (P), object (O) or integral
   (I) type.  Be careful to pick the correct one, as you'll get an
   awkward and inefficient API if you use the wrong one.  There is a
   check, which results in a compile-time warning, for the P and I
   versions, but there is no check for the O versions, as that is not
   possible in plain C.  Due to the way GTY works, you must annotate
   any structures you wish to insert or reference from a vector with a
   GTY(()) tag.  You need to do this even if you never declare the GC
   allocated variants.
   An example of their use would be,

   DEF_VEC_P(tree);		// non-managed tree vector.
   DEF_VEC_ALLOC_P(tree,gc);	// gc'd vector of tree pointers.  This must
				// appear at file scope.

   struct my_struct {
     VEC(tree,gc) *v;		// A (pointer to) a vector of tree pointers.
   };

   struct my_struct *s;

   if (VEC_length(tree,s->v)) { we have some contents }
   VEC_safe_push(tree,gc,s->v,decl); // append some decl onto the end
   for (ix = 0; VEC_iterate(tree,s->v,ix,elt); ix++)
     { do something with elt }  */

/* Macros to invoke API calls.  A single macro works for both pointer
   and object vectors, but the argument and return types might well be
   different.  In each macro, T is the typedef of the vector elements,
   and A is the allocation strategy.  The allocation strategy is only
   present when it is required.  Some of these macros pass the vector,
   V, by reference (by taking its address); this is noted in the
   descriptions.  */

/* Length of vector
   unsigned VEC_T_length(const VEC(T) *v);

   Return the number of active elements in V.  V can be NULL, in which
   case zero is returned.  */

#define VEC_length(T,V)	(VEC_OP(T,base,length)(VEC_BASE(V)))

/* Check if vector is empty
   int VEC_T_empty(const VEC(T) *v);

   Return nonzero if V is an empty vector (or V is NULL), zero otherwise.  */

#define VEC_empty(T,V)	(VEC_length (T,V) == 0)

/* Get the final element of the vector.
   T VEC_T_last(VEC(T) *v);	// Integer
   T VEC_T_last(VEC(T) *v);	// Pointer
   T *VEC_T_last(VEC(T) *v);	// Object

   Return the final element.  V must not be empty.  */

#define VEC_last(T,V)	(VEC_OP(T,base,last)(VEC_BASE(V) VEC_CHECK_INFO))

/* Index into vector
   T VEC_T_index(VEC(T) *v, unsigned ix);	// Integer
   T VEC_T_index(VEC(T) *v, unsigned ix);	// Pointer
   T *VEC_T_index(VEC(T) *v, unsigned ix);	// Object

   Return the IX'th element.  IX must be in the domain of V.  */

#define VEC_index(T,V,I) (VEC_OP(T,base,index)(VEC_BASE(V),I VEC_CHECK_INFO))

/* Iterate over vector
   int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr);	// Integer
   int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr);	// Pointer
   int VEC_T_iterate(VEC(T) *v, unsigned ix, T *&ptr);	// Object

   Return iteration condition and update PTR to point to the IX'th
   element.  At the end of iteration, sets PTR to NULL.  Use this to
   iterate over the elements of a vector as follows,

     for (ix = 0; VEC_iterate(T,v,ix,ptr); ix++)
       continue;  */

#define VEC_iterate(T,V,I,P)	(VEC_OP(T,base,iterate)(VEC_BASE(V),I,&(P)))

/* Convenience macro for forward iteration.  */

#define FOR_EACH_VEC_ELT(T, V, I, P)		\
  for (I = 0; VEC_iterate (T, (V), (I), (P)); ++(I))

/* Likewise, but start from FROM rather than 0.  */

#define FOR_EACH_VEC_ELT_FROM(T, V, I, P, FROM)		\
  for (I = (FROM); VEC_iterate (T, (V), (I), (P)); ++(I))

/* Convenience macro for reverse iteration.  */

#define FOR_EACH_VEC_ELT_REVERSE(T,V,I,P)	\
  for (I = VEC_length (T, (V)) - 1;		\
       VEC_iterate (T, (V), (I), (P));		\
       (I)--)

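/* An illustrative sketch (not part of the API): walking the gc'd tree
   vector from the example in the overview comment above with the
   forward-iteration macro, which expands to the VEC_iterate loop shown
   earlier.  'process' is a hypothetical function.

     tree elt;
     unsigned ix;

     FOR_EACH_VEC_ELT (tree, s->v, ix, elt)
       process (elt);  */
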
/* Allocate new vector.
   VEC(T,A) *VEC_T_A_alloc(int reserve);

   Allocate a new vector with space for RESERVE objects.  If RESERVE
   is zero, NO vector is created.  */

#define VEC_alloc(T,A,N)	(VEC_OP(T,A,alloc)(N MEM_STAT_INFO))

/* Free a vector.
   void VEC_T_A_free(VEC(T,A) *&);

   Free a vector and set it to NULL.  */

#define VEC_free(T,A,V)	(VEC_OP(T,A,free)(&V))

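/* A minimal allocate/use/free cycle, as a sketch.  It assumes an integer
   vector type has been declared elsewhere with DEF_VEC_I(int) and
   DEF_VEC_ALLOC_I(int,heap):

     VEC(int,heap) *v = VEC_alloc (int, heap, 8);	// room for 8 ints
     VEC_quick_push (int, v, 42);			// fits, no reallocation
     VEC_free (int, heap, v);				// v is set to NULL  */
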
/* Use these to determine the required size and initialization of a
   vector embedded within another structure (as the final member).

   size_t VEC_T_embedded_size(int reserve);
   void VEC_T_embedded_init(VEC(T) *v, int reserve);

   These allow the caller to perform the memory allocation.  */

#define VEC_embedded_size(T,N)	 (VEC_OP(T,base,embedded_size)(N))
#define VEC_embedded_init(T,O,N) (VEC_OP(T,base,embedded_init)(VEC_BASE(O),N))

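/* A sketch of embedding a vector as the final member of a structure and
   letting the caller do the allocation.  'struct bucket', 'n' and the use
   of xmalloc are illustrative only:

     struct bucket {
       int id;
       VEC(int,none) elts;	// must be the last member
     };

     size_t sz = offsetof (struct bucket, elts) + VEC_embedded_size (int, n);
     struct bucket *b = (struct bucket *) xmalloc (sz);
     VEC_embedded_init (int, &b->elts, n);  */
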
/* Copy a vector.
   VEC(T,A) *VEC_T_A_copy(VEC(T) *);

   Copy the live elements of a vector into a new vector.  The new and
   old vectors need not be allocated by the same mechanism.  */

#define VEC_copy(T,A,V) (VEC_OP(T,A,copy)(VEC_BASE(V) MEM_STAT_INFO))

/* Determine if a vector has additional capacity.

   int VEC_T_space (VEC(T) *v, int reserve)

   If V has space for RESERVE additional entries, return nonzero.  You
   usually only need to use this if you are doing your own vector
   reallocation, for instance on an embedded vector.  This returns
   nonzero in exactly the same circumstances that VEC_T_reserve
   will.  */

#define VEC_space(T,V,R) \
	(VEC_OP(T,base,space)(VEC_BASE(V),R VEC_CHECK_INFO))

/* Reserve space.
   int VEC_T_A_reserve(VEC(T,A) *&v, int reserve);

   Ensure that V has at least RESERVE slots available.  This will
   create additional headroom.  Note this can cause V to be
   reallocated.  Returns nonzero iff reallocation actually
   occurred.  */

#define VEC_reserve(T,A,V,R)	\
	(VEC_OP(T,A,reserve)(&(V),R VEC_CHECK_INFO MEM_STAT_INFO))

/* Reserve space exactly.
   int VEC_T_A_reserve_exact(VEC(T,A) *&v, int reserve);

   Ensure that V has at least RESERVE slots available.  This will not
   create additional headroom.  Note this can cause V to be
   reallocated.  Returns nonzero iff reallocation actually
   occurred.  */

#define VEC_reserve_exact(T,A,V,R)	\
	(VEC_OP(T,A,reserve_exact)(&(V),R VEC_CHECK_INFO MEM_STAT_INFO))

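/* A sketch of the pattern recommended in the overview comment: when the
   final number of elements is known, reserve once and then use the
   'quick' push.  'v', 'n' and 'values' are illustrative:

     VEC_reserve_exact (int, heap, v, n);
     for (i = 0; i < n; i++)
       VEC_quick_push (int, v, values[i]);  */
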
/* Copy elements with no reallocation
   void VEC_T_splice (VEC(T) *dst, VEC(T) *src);	// Integer
   void VEC_T_splice (VEC(T) *dst, VEC(T) *src);	// Pointer
   void VEC_T_splice (VEC(T) *dst, VEC(T) *src);	// Object

   Copy the elements in SRC to the end of DST as if by memcpy.  DST and
   SRC need not be allocated with the same mechanism, although they most
   often will be.  DST is assumed to have sufficient headroom
   available.  */

#define VEC_splice(T,DST,SRC)			\
  (VEC_OP(T,base,splice)(VEC_BASE(DST), VEC_BASE(SRC) VEC_CHECK_INFO))

/* Copy elements with reallocation
   void VEC_T_safe_splice (VEC(T,A) *&dst, VEC(T) *src);	// Integer
   void VEC_T_safe_splice (VEC(T,A) *&dst, VEC(T) *src);	// Pointer
   void VEC_T_safe_splice (VEC(T,A) *&dst, VEC(T) *src);	// Object

   Copy the elements in SRC to the end of DST as if by memcpy.  DST and
   SRC need not be allocated with the same mechanism, although they most
   often will be.  DST need not have sufficient headroom and will be
   reallocated if needed.  */

#define VEC_safe_splice(T,A,DST,SRC)					\
  (VEC_OP(T,A,safe_splice)(&(DST), VEC_BASE(SRC) VEC_CHECK_INFO MEM_STAT_INFO))

/* Push object with no reallocation
   T *VEC_T_quick_push (VEC(T) *v, T obj);	// Integer
   T *VEC_T_quick_push (VEC(T) *v, T obj);	// Pointer
   T *VEC_T_quick_push (VEC(T) *v, T *obj);	// Object

   Push a new element onto the end and return a pointer to the slot
   filled in.  For object vectors, the new value can be NULL, in which
   case NO initialization is performed.  There must be sufficient
   space in the vector.  */

#define VEC_quick_push(T,V,O)	\
	(VEC_OP(T,base,quick_push)(VEC_BASE(V),O VEC_CHECK_INFO))

/* Push object with reallocation
   T *VEC_T_A_safe_push (VEC(T,A) *&v, T obj);	// Integer
   T *VEC_T_A_safe_push (VEC(T,A) *&v, T obj);	// Pointer
   T *VEC_T_A_safe_push (VEC(T,A) *&v, T *obj);	// Object

   Push a new element onto the end and return a pointer to the slot
   filled in.  For object vectors, the new value can be NULL, in which
   case NO initialization is performed.  Reallocates V, if needed.  */

#define VEC_safe_push(T,A,V,O)		\
	(VEC_OP(T,A,safe_push)(&(V),O VEC_CHECK_INFO MEM_STAT_INFO))

/* Pop element off end
   T VEC_T_pop (VEC(T) *v);		// Integer
   T VEC_T_pop (VEC(T) *v);		// Pointer
   void VEC_T_pop (VEC(T) *v);		// Object

   Pop the last element off the end.  Returns the element popped, for
   integer and pointer vectors.  */

#define VEC_pop(T,V)	(VEC_OP(T,base,pop)(VEC_BASE(V) VEC_CHECK_INFO))

/* Truncate to specific length
   void VEC_T_truncate (VEC(T) *v, unsigned len);

   Set the length as specified.  The new length must be less than or
   equal to the current length.  This is an O(1) operation.  */

#define VEC_truncate(T,V,I)		\
	(VEC_OP(T,base,truncate)(VEC_BASE(V),I VEC_CHECK_INFO))

/* Grow to a specific length.
   void VEC_T_A_safe_grow (VEC(T,A) *&v, int len);

   Grow the vector to a specific length.  LEN must be at least as
   large as the current length.  The new elements are
   uninitialized.  */

#define VEC_safe_grow(T,A,V,I)		\
	(VEC_OP(T,A,safe_grow)(&(V),I VEC_CHECK_INFO MEM_STAT_INFO))

/* Grow to a specific length.
   void VEC_T_A_safe_grow_cleared (VEC(T,A) *&v, int len);

   Grow the vector to a specific length.  LEN must be at least as
   large as the current length.  The new elements are
   initialized to zero.  */

#define VEC_safe_grow_cleared(T,A,V,I)		\
	(VEC_OP(T,A,safe_grow_cleared)(&(V),I VEC_CHECK_INFO MEM_STAT_INFO))

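/* For instance, a sketch of sizing a vector so it can be indexed directly
   by a known id; 'counts', 'max_id' and 'id' are illustrative:

     VEC_safe_grow_cleared (int, heap, counts, max_id + 1);
     VEC_replace (int, counts, id, VEC_index (int, counts, id) + 1);  */
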
/* Replace element
   T VEC_T_replace (VEC(T) *v, unsigned ix, T val);	// Integer
   T VEC_T_replace (VEC(T) *v, unsigned ix, T val);	// Pointer
   T *VEC_T_replace (VEC(T) *v, unsigned ix, T *val);	// Object

   Replace the IXth element of V with a new value, VAL.  For pointer
   vectors returns the original value.  For object vectors returns a
   pointer to the new value.  For object vectors the new value can be
   NULL, in which case no overwriting of the slot is actually
   performed.  */

#define VEC_replace(T,V,I,O)		\
	(VEC_OP(T,base,replace)(VEC_BASE(V),I,O VEC_CHECK_INFO))

/* Insert object with no reallocation
   T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val);	// Integer
   T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val);	// Pointer
   T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T *val);	// Object

   Insert an element, VAL, at the IXth position of V.  Return a pointer
   to the slot created.  For vectors of object, the new value can be
   NULL, in which case no initialization of the inserted slot takes
   place.  There must be sufficient space.  */

#define VEC_quick_insert(T,V,I,O)	\
	(VEC_OP(T,base,quick_insert)(VEC_BASE(V),I,O VEC_CHECK_INFO))

/* Insert object with reallocation
   T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T val);	// Integer
   T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T val);	// Pointer
   T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T *val);	// Object

   Insert an element, VAL, at the IXth position of V.  Return a pointer
   to the slot created.  For vectors of object, the new value can be
   NULL, in which case no initialization of the inserted slot takes
   place.  Reallocate V, if necessary.  */

#define VEC_safe_insert(T,A,V,I,O)	\
	(VEC_OP(T,A,safe_insert)(&(V),I,O VEC_CHECK_INFO MEM_STAT_INFO))

/* Remove element retaining order
   T VEC_T_ordered_remove (VEC(T) *v, unsigned ix);	// Integer
   T VEC_T_ordered_remove (VEC(T) *v, unsigned ix);	// Pointer
   void VEC_T_ordered_remove (VEC(T) *v, unsigned ix);	// Object

   Remove an element from the IXth position of V.  Ordering of
   remaining elements is preserved.  For pointer vectors returns the
   removed object.  This is an O(N) operation due to a memmove.  */

#define VEC_ordered_remove(T,V,I)	\
	(VEC_OP(T,base,ordered_remove)(VEC_BASE(V),I VEC_CHECK_INFO))

/* Remove element destroying order
   T VEC_T_unordered_remove (VEC(T) *v, unsigned ix);		// Integer
   T VEC_T_unordered_remove (VEC(T) *v, unsigned ix);		// Pointer
   void VEC_T_unordered_remove (VEC(T) *v, unsigned ix);	// Object

   Remove an element from the IXth position of V.  Ordering of
   remaining elements is destroyed.  For pointer vectors returns the
   removed object.  This is an O(1) operation.  */

#define VEC_unordered_remove(T,V,I)	\
	(VEC_OP(T,base,unordered_remove)(VEC_BASE(V),I VEC_CHECK_INFO))

/* Remove a block of elements
   void VEC_T_block_remove (VEC(T) *v, unsigned ix, unsigned len);

   Remove LEN elements starting at the IXth.  Ordering is retained.
   This is an O(N) operation due to memmove.  */

#define VEC_block_remove(T,V,I,L)	\
	(VEC_OP(T,base,block_remove)(VEC_BASE(V),I,L VEC_CHECK_INFO))

/* Get the address of the array of elements
   T *VEC_T_address (VEC(T) v)

   If you need to directly manipulate the array (for instance, you
   want to feed it to qsort), use this accessor.  */

#define VEC_address(T,V)		(VEC_OP(T,base,address)(VEC_BASE(V)))

/* Conveniently sort the contents of the vector with qsort.
   void VEC_qsort (VEC(T) *v, int (*cmp_func)(const void *, const void *))  */

#define VEC_qsort(T,V,CMP) qsort(VEC_address (T,V), VEC_length(T,V),	\
				 sizeof (T), CMP)

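/* A sketch of sorting an integer vector; 'cmp_int' is an illustrative
   comparison function following the usual qsort contract:

     static int
     cmp_int (const void *a_, const void *b_)
     {
       const int *a = (const int *) a_;
       const int *b = (const int *) b_;
       return *a < *b ? -1 : *a > *b;
     }

     ...
     VEC_qsort (int, v, cmp_int);  */
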
/* Find the first index in the vector not less than the object.
   unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
                               bool (*lessthan) (const T, const T));	// Integer
   unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
                               bool (*lessthan) (const T, const T));	// Pointer
   unsigned VEC_T_lower_bound (VEC(T) *v, const T *val,
                               bool (*lessthan) (const T*, const T*));	// Object

   Find the first position in which VAL could be inserted without
   changing the ordering of V.  LESSTHAN is a function that returns
   true if the first argument is strictly less than the second.  */

#define VEC_lower_bound(T,V,O,LT)	\
       (VEC_OP(T,base,lower_bound)(VEC_BASE(V),O,LT VEC_CHECK_INFO))

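/* A sketch of keeping a heap-allocated integer vector sorted on
   insertion; 'int_lessthan', 'v' and 'x' are illustrative:

     static bool
     int_lessthan (const int a, const int b)
     {
       return a < b;
     }

     ...
     unsigned ix = VEC_lower_bound (int, v, x, int_lessthan);
     VEC_safe_insert (int, heap, v, ix, x);  */
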
471 /* Reallocate an array of elements with prefix. */
472 extern void *vec_gc_p_reserve (void *, int MEM_STAT_DECL);
473 extern void *vec_gc_p_reserve_exact (void *, int MEM_STAT_DECL);
474 extern void *vec_gc_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
475 extern void *vec_gc_o_reserve_exact (void *, int, size_t, size_t
476 MEM_STAT_DECL);
477 extern void ggc_free (void *);
478 #define vec_gc_free(V) ggc_free (V)
479 extern void *vec_heap_p_reserve (void *, int MEM_STAT_DECL);
480 extern void *vec_heap_p_reserve_exact (void *, int MEM_STAT_DECL);
481 extern void *vec_heap_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
482 extern void *vec_heap_o_reserve_exact (void *, int, size_t, size_t
483 MEM_STAT_DECL);
484 extern void dump_vec_loc_statistics (void);
485 #ifdef GATHER_STATISTICS
486 void vec_heap_free (void *);
487 #else
488 /* Avoid problems with frontends that #define free(x). */
489 #define vec_heap_free(V) (free) (V)
490 #endif
492 #if ENABLE_CHECKING
493 #define VEC_CHECK_INFO ,__FILE__,__LINE__,__FUNCTION__
494 #define VEC_CHECK_DECL ,const char *file_,unsigned line_,const char *function_
495 #define VEC_CHECK_PASS ,file_,line_,function_
497 #define VEC_ASSERT(EXPR,OP,T,A) \
498 (void)((EXPR) ? 0 : (VEC_ASSERT_FAIL(OP,VEC(T,A)), 0))
500 extern void vec_assert_fail (const char *, const char * VEC_CHECK_DECL)
501 ATTRIBUTE_NORETURN;
502 #define VEC_ASSERT_FAIL(OP,VEC) vec_assert_fail (OP,#VEC VEC_CHECK_PASS)
503 #else
504 #define VEC_CHECK_INFO
505 #define VEC_CHECK_DECL
506 #define VEC_CHECK_PASS
507 #define VEC_ASSERT(EXPR,OP,T,A) (void)(EXPR)
508 #endif
510 /* Note: gengtype has hardwired knowledge of the expansions of the
511 VEC, DEF_VEC_*, and DEF_VEC_ALLOC_* macros. If you change the
512 expansions of these macros you may need to change gengtype too. */
514 typedef struct GTY(()) vec_prefix
516 unsigned num;
517 unsigned alloc;
518 } vec_prefix;
520 #define VEC(T,A) VEC_##T##_##A
521 #define VEC_OP(T,A,OP) VEC_##T##_##A##_##OP
523 /* Base of vector type, not user visible. */
524 #define VEC_T(T,B) \
525 typedef struct VEC(T,B) \
527 struct vec_prefix prefix; \
528 T vec[1]; \
529 } VEC(T,B)
531 #define VEC_T_GTY(T,B) \
532 typedef struct GTY(()) VEC(T,B) \
534 struct vec_prefix prefix; \
535 T GTY ((length ("%h.prefix.num"))) vec[1]; \
536 } VEC(T,B)
538 /* Derived vector type, user visible. */
539 #define VEC_TA_GTY(T,B,A,GTY) \
540 typedef struct GTY VEC(T,A) \
542 VEC(T,B) base; \
543 } VEC(T,A)
545 #define VEC_TA(T,B,A) \
546 typedef struct VEC(T,A) \
548 VEC(T,B) base; \
549 } VEC(T,A)
551 /* Convert to base type. */
552 #if GCC_VERSION >= 4000
553 #define VEC_BASE(P) \
554 ((offsetof (__typeof (*P), base) == 0 || (P)) ? &(P)->base : 0)
555 #else
556 #define VEC_BASE(P) ((P) ? &(P)->base : 0)
557 #endif
559 /* Vector of integer-like object. */
560 #define DEF_VEC_I(T) \
561 static inline void VEC_OP (T,must_be,integral_type) (void) \
563 (void)~(T)0; \
566 VEC_T(T,base); \
567 VEC_TA(T,base,none); \
568 DEF_VEC_FUNC_P(T) \
569 struct vec_swallow_trailing_semi
570 #define DEF_VEC_ALLOC_I(T,A) \
571 VEC_TA(T,base,A); \
572 DEF_VEC_ALLOC_FUNC_I(T,A) \
573 DEF_VEC_NONALLOC_FUNCS_I(T,A) \
574 struct vec_swallow_trailing_semi
576 /* Vector of pointer to object. */
577 #define DEF_VEC_P(T) \
578 static inline void VEC_OP (T,must_be,pointer_type) (void) \
580 (void)((T)1 == (void *)1); \
583 VEC_T_GTY(T,base); \
584 VEC_TA(T,base,none); \
585 DEF_VEC_FUNC_P(T) \
586 struct vec_swallow_trailing_semi
587 #define DEF_VEC_ALLOC_P(T,A) \
588 VEC_TA(T,base,A); \
589 DEF_VEC_ALLOC_FUNC_P(T,A) \
590 DEF_VEC_NONALLOC_FUNCS_P(T,A) \
591 struct vec_swallow_trailing_semi
593 #define DEF_VEC_FUNC_P(T) \
594 static inline unsigned VEC_OP (T,base,length) (const VEC(T,base) *vec_) \
596 return vec_ ? vec_->prefix.num : 0; \
599 static inline T VEC_OP (T,base,last) \
600 (const VEC(T,base) *vec_ VEC_CHECK_DECL) \
602 VEC_ASSERT (vec_ && vec_->prefix.num, "last", T, base); \
604 return vec_->vec[vec_->prefix.num - 1]; \
607 static inline T VEC_OP (T,base,index) \
608 (const VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
610 VEC_ASSERT (vec_ && ix_ < vec_->prefix.num, "index", T, base); \
612 return vec_->vec[ix_]; \
615 static inline int VEC_OP (T,base,iterate) \
616 (const VEC(T,base) *vec_, unsigned ix_, T *ptr) \
618 if (vec_ && ix_ < vec_->prefix.num) \
620 *ptr = vec_->vec[ix_]; \
621 return 1; \
623 else \
625 *ptr = (T) 0; \
626 return 0; \
630 static inline size_t VEC_OP (T,base,embedded_size) \
631 (int alloc_) \
633 return offsetof (VEC(T,base),vec) + alloc_ * sizeof(T); \
636 static inline void VEC_OP (T,base,embedded_init) \
637 (VEC(T,base) *vec_, int alloc_) \
639 vec_->prefix.num = 0; \
640 vec_->prefix.alloc = alloc_; \
643 static inline int VEC_OP (T,base,space) \
644 (VEC(T,base) *vec_, int alloc_ VEC_CHECK_DECL) \
646 VEC_ASSERT (alloc_ >= 0, "space", T, base); \
647 return vec_ ? vec_->prefix.alloc - vec_->prefix.num >= (unsigned)alloc_ : !alloc_; \
650 static inline void VEC_OP(T,base,splice) \
651 (VEC(T,base) *dst_, VEC(T,base) *src_ VEC_CHECK_DECL) \
653 if (src_) \
655 unsigned len_ = src_->prefix.num; \
656 VEC_ASSERT (dst_->prefix.num + len_ <= dst_->prefix.alloc, "splice", T, base); \
658 memcpy (&dst_->vec[dst_->prefix.num], &src_->vec[0], len_ * sizeof (T)); \
659 dst_->prefix.num += len_; \
663 static inline T *VEC_OP (T,base,quick_push) \
664 (VEC(T,base) *vec_, T obj_ VEC_CHECK_DECL) \
666 T *slot_; \
668 VEC_ASSERT (vec_->prefix.num < vec_->prefix.alloc, "push", T, base); \
669 slot_ = &vec_->vec[vec_->prefix.num++]; \
670 *slot_ = obj_; \
672 return slot_; \
675 static inline T VEC_OP (T,base,pop) (VEC(T,base) *vec_ VEC_CHECK_DECL) \
677 T obj_; \
679 VEC_ASSERT (vec_->prefix.num, "pop", T, base); \
680 obj_ = vec_->vec[--vec_->prefix.num]; \
682 return obj_; \
685 static inline void VEC_OP (T,base,truncate) \
686 (VEC(T,base) *vec_, unsigned size_ VEC_CHECK_DECL) \
688 VEC_ASSERT (vec_ ? vec_->prefix.num >= size_ : !size_, "truncate", T, base); \
689 if (vec_) \
690 vec_->prefix.num = size_; \
693 static inline T VEC_OP (T,base,replace) \
694 (VEC(T,base) *vec_, unsigned ix_, T obj_ VEC_CHECK_DECL) \
696 T old_obj_; \
698 VEC_ASSERT (ix_ < vec_->prefix.num, "replace", T, base); \
699 old_obj_ = vec_->vec[ix_]; \
700 vec_->vec[ix_] = obj_; \
702 return old_obj_; \
705 static inline T *VEC_OP (T,base,quick_insert) \
706 (VEC(T,base) *vec_, unsigned ix_, T obj_ VEC_CHECK_DECL) \
708 T *slot_; \
710 VEC_ASSERT (vec_->prefix.num < vec_->prefix.alloc, "insert", T, base); \
711 VEC_ASSERT (ix_ <= vec_->prefix.num, "insert", T, base); \
712 slot_ = &vec_->vec[ix_]; \
713 memmove (slot_ + 1, slot_, (vec_->prefix.num++ - ix_) * sizeof (T)); \
714 *slot_ = obj_; \
716 return slot_; \
719 static inline T VEC_OP (T,base,ordered_remove) \
720 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
722 T *slot_; \
723 T obj_; \
725 VEC_ASSERT (ix_ < vec_->prefix.num, "remove", T, base); \
726 slot_ = &vec_->vec[ix_]; \
727 obj_ = *slot_; \
728 memmove (slot_, slot_ + 1, (--vec_->prefix.num - ix_) * sizeof (T)); \
730 return obj_; \
733 static inline T VEC_OP (T,base,unordered_remove) \
734 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
736 T *slot_; \
737 T obj_; \
739 VEC_ASSERT (ix_ < vec_->prefix.num, "remove", T, base); \
740 slot_ = &vec_->vec[ix_]; \
741 obj_ = *slot_; \
742 *slot_ = vec_->vec[--vec_->prefix.num]; \
744 return obj_; \
747 static inline void VEC_OP (T,base,block_remove) \
748 (VEC(T,base) *vec_, unsigned ix_, unsigned len_ VEC_CHECK_DECL) \
750 T *slot_; \
752 VEC_ASSERT (ix_ + len_ <= vec_->prefix.num, "block_remove", T, base); \
753 slot_ = &vec_->vec[ix_]; \
754 vec_->prefix.num -= len_; \
755 memmove (slot_, slot_ + len_, (vec_->prefix.num - ix_) * sizeof (T)); \
758 static inline T *VEC_OP (T,base,address) \
759 (VEC(T,base) *vec_) \
761 return vec_ ? vec_->vec : 0; \
764 static inline unsigned VEC_OP (T,base,lower_bound) \
765 (VEC(T,base) *vec_, const T obj_, \
766 bool (*lessthan_)(const T, const T) VEC_CHECK_DECL) \
768 unsigned int len_ = VEC_OP (T,base, length) (vec_); \
769 unsigned int half_, middle_; \
770 unsigned int first_ = 0; \
771 while (len_ > 0) \
773 T middle_elem_; \
774 half_ = len_ >> 1; \
775 middle_ = first_; \
776 middle_ += half_; \
777 middle_elem_ = VEC_OP (T,base,index) (vec_, middle_ VEC_CHECK_PASS); \
778 if (lessthan_ (middle_elem_, obj_)) \
780 first_ = middle_; \
781 ++first_; \
782 len_ = len_ - half_ - 1; \
784 else \
785 len_ = half_; \
787 return first_; \
790 #define DEF_VEC_ALLOC_FUNC_P(T,A) \
791 static inline VEC(T,A) *VEC_OP (T,A,alloc) \
792 (int alloc_ MEM_STAT_DECL) \
794 return (VEC(T,A) *) vec_##A##_p_reserve_exact (NULL, alloc_ \
795 PASS_MEM_STAT); \
799 #define DEF_VEC_NONALLOC_FUNCS_P(T,A) \
800 static inline void VEC_OP (T,A,free) \
801 (VEC(T,A) **vec_) \
803 if (*vec_) \
804 vec_##A##_free (*vec_); \
805 *vec_ = NULL; \
808 static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
810 size_t len_ = vec_ ? vec_->prefix.num : 0; \
811 VEC (T,A) *new_vec_ = NULL; \
813 if (len_) \
815 new_vec_ = (VEC (T,A) *)(vec_##A##_p_reserve_exact \
816 (NULL, len_ PASS_MEM_STAT)); \
818 new_vec_->base.prefix.num = len_; \
819 memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_); \
821 return new_vec_; \
824 static inline int VEC_OP (T,A,reserve) \
825 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
827 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
828 VEC_CHECK_PASS); \
830 if (extend) \
831 *vec_ = (VEC(T,A) *) vec_##A##_p_reserve (*vec_, alloc_ PASS_MEM_STAT); \
833 return extend; \
836 static inline int VEC_OP (T,A,reserve_exact) \
837 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
839 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
840 VEC_CHECK_PASS); \
842 if (extend) \
843 *vec_ = (VEC(T,A) *) vec_##A##_p_reserve_exact (*vec_, alloc_ \
844 PASS_MEM_STAT); \
846 return extend; \
849 static inline void VEC_OP (T,A,safe_grow) \
850 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
852 VEC_ASSERT (size_ >= 0 \
853 && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
854 "grow", T, A); \
855 VEC_OP (T,A,reserve_exact) (vec_, \
856 size_ - (int)(*vec_ ? VEC_BASE(*vec_)->prefix.num : 0) \
857 VEC_CHECK_PASS PASS_MEM_STAT); \
858 VEC_BASE (*vec_)->prefix.num = size_; \
861 static inline void VEC_OP (T,A,safe_grow_cleared) \
862 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
864 int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_); \
865 VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT); \
866 memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0, \
867 sizeof (T) * (size_ - oldsize)); \
870 static inline void VEC_OP(T,A,safe_splice) \
871 (VEC(T,A) **dst_, VEC(T,base) *src_ VEC_CHECK_DECL MEM_STAT_DECL) \
873 if (src_) \
875 VEC_OP (T,A,reserve_exact) (dst_, src_->prefix.num \
876 VEC_CHECK_PASS MEM_STAT_INFO); \
878 VEC_OP (T,base,splice) (VEC_BASE (*dst_), src_ \
879 VEC_CHECK_PASS); \
883 static inline T *VEC_OP (T,A,safe_push) \
884 (VEC(T,A) **vec_, T obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
886 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
888 return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \
891 static inline T *VEC_OP (T,A,safe_insert) \
892 (VEC(T,A) **vec_, unsigned ix_, T obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
894 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
896 return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_ \
897 VEC_CHECK_PASS); \
900 /* Vector of object. */
901 #define DEF_VEC_O(T) \
902 VEC_T_GTY(T,base); \
903 VEC_TA(T,base,none); \
904 DEF_VEC_FUNC_O(T) \
905 struct vec_swallow_trailing_semi
906 #define DEF_VEC_ALLOC_O(T,A) \
907 VEC_TA(T,base,A); \
908 DEF_VEC_ALLOC_FUNC_O(T,A) \
909 DEF_VEC_NONALLOC_FUNCS_O(T,A) \
910 struct vec_swallow_trailing_semi
912 #define DEF_VEC_FUNC_O(T) \
913 static inline unsigned VEC_OP (T,base,length) (const VEC(T,base) *vec_) \
915 return vec_ ? vec_->prefix.num : 0; \
918 static inline T *VEC_OP (T,base,last) (VEC(T,base) *vec_ VEC_CHECK_DECL) \
920 VEC_ASSERT (vec_ && vec_->prefix.num, "last", T, base); \
922 return &vec_->vec[vec_->prefix.num - 1]; \
925 static inline T *VEC_OP (T,base,index) \
926 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
928 VEC_ASSERT (vec_ && ix_ < vec_->prefix.num, "index", T, base); \
930 return &vec_->vec[ix_]; \
933 static inline int VEC_OP (T,base,iterate) \
934 (VEC(T,base) *vec_, unsigned ix_, T **ptr) \
936 if (vec_ && ix_ < vec_->prefix.num) \
938 *ptr = &vec_->vec[ix_]; \
939 return 1; \
941 else \
943 *ptr = 0; \
944 return 0; \
948 static inline size_t VEC_OP (T,base,embedded_size) \
949 (int alloc_) \
951 return offsetof (VEC(T,base),vec) + alloc_ * sizeof(T); \
954 static inline void VEC_OP (T,base,embedded_init) \
955 (VEC(T,base) *vec_, int alloc_) \
957 vec_->prefix.num = 0; \
958 vec_->prefix.alloc = alloc_; \
961 static inline int VEC_OP (T,base,space) \
962 (VEC(T,base) *vec_, int alloc_ VEC_CHECK_DECL) \
964 VEC_ASSERT (alloc_ >= 0, "space", T, base); \
965 return vec_ ? vec_->prefix.alloc - vec_->prefix.num >= (unsigned)alloc_ : !alloc_; \
968 static inline void VEC_OP(T,base,splice) \
969 (VEC(T,base) *dst_, VEC(T,base) *src_ VEC_CHECK_DECL) \
971 if (src_) \
973 unsigned len_ = src_->prefix.num; \
974 VEC_ASSERT (dst_->prefix.num + len_ <= dst_->prefix.alloc, "splice", T, base); \
976 memcpy (&dst_->vec[dst_->prefix.num], &src_->vec[0], len_ * sizeof (T)); \
977 dst_->prefix.num += len_; \
981 static inline T *VEC_OP (T,base,quick_push) \
982 (VEC(T,base) *vec_, const T *obj_ VEC_CHECK_DECL) \
984 T *slot_; \
986 VEC_ASSERT (vec_->prefix.num < vec_->prefix.alloc, "push", T, base); \
987 slot_ = &vec_->vec[vec_->prefix.num++]; \
988 if (obj_) \
989 *slot_ = *obj_; \
991 return slot_; \
994 static inline void VEC_OP (T,base,pop) (VEC(T,base) *vec_ VEC_CHECK_DECL) \
996 VEC_ASSERT (vec_->prefix.num, "pop", T, base); \
997 --vec_->prefix.num; \
1000 static inline void VEC_OP (T,base,truncate) \
1001 (VEC(T,base) *vec_, unsigned size_ VEC_CHECK_DECL) \
1003 VEC_ASSERT (vec_ ? vec_->prefix.num >= size_ : !size_, "truncate", T, base); \
1004 if (vec_) \
1005 vec_->prefix.num = size_; \
1008 static inline T *VEC_OP (T,base,replace) \
1009 (VEC(T,base) *vec_, unsigned ix_, const T *obj_ VEC_CHECK_DECL) \
1011 T *slot_; \
1013 VEC_ASSERT (ix_ < vec_->prefix.num, "replace", T, base); \
1014 slot_ = &vec_->vec[ix_]; \
1015 if (obj_) \
1016 *slot_ = *obj_; \
1018 return slot_; \
1021 static inline T *VEC_OP (T,base,quick_insert) \
1022 (VEC(T,base) *vec_, unsigned ix_, const T *obj_ VEC_CHECK_DECL) \
1024 T *slot_; \
1026 VEC_ASSERT (vec_->prefix.num < vec_->prefix.alloc, "insert", T, base); \
1027 VEC_ASSERT (ix_ <= vec_->prefix.num, "insert", T, base); \
1028 slot_ = &vec_->vec[ix_]; \
1029 memmove (slot_ + 1, slot_, (vec_->prefix.num++ - ix_) * sizeof (T)); \
1030 if (obj_) \
1031 *slot_ = *obj_; \
1033 return slot_; \
1036 static inline void VEC_OP (T,base,ordered_remove) \
1037 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
1039 T *slot_; \
1041 VEC_ASSERT (ix_ < vec_->prefix.num, "remove", T, base); \
1042 slot_ = &vec_->vec[ix_]; \
1043 memmove (slot_, slot_ + 1, (--vec_->prefix.num - ix_) * sizeof (T)); \
1046 static inline void VEC_OP (T,base,unordered_remove) \
1047 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
1049 VEC_ASSERT (ix_ < vec_->prefix.num, "remove", T, base); \
1050 vec_->vec[ix_] = vec_->vec[--vec_->prefix.num]; \
1053 static inline void VEC_OP (T,base,block_remove) \
1054 (VEC(T,base) *vec_, unsigned ix_, unsigned len_ VEC_CHECK_DECL) \
1056 T *slot_; \
1058 VEC_ASSERT (ix_ + len_ <= vec_->prefix.num, "block_remove", T, base); \
1059 slot_ = &vec_->vec[ix_]; \
1060 vec_->prefix.num -= len_; \
1061 memmove (slot_, slot_ + len_, (vec_->prefix.num - ix_) * sizeof (T)); \
1064 static inline T *VEC_OP (T,base,address) \
1065 (VEC(T,base) *vec_) \
1067 return vec_ ? vec_->vec : 0; \
1070 static inline unsigned VEC_OP (T,base,lower_bound) \
1071 (VEC(T,base) *vec_, const T *obj_, \
1072 bool (*lessthan_)(const T *, const T *) VEC_CHECK_DECL) \
1074 unsigned int len_ = VEC_OP (T, base, length) (vec_); \
1075 unsigned int half_, middle_; \
1076 unsigned int first_ = 0; \
1077 while (len_ > 0) \
1079 T *middle_elem_; \
1080 half_ = len_ >> 1; \
1081 middle_ = first_; \
1082 middle_ += half_; \
1083 middle_elem_ = VEC_OP (T,base,index) (vec_, middle_ VEC_CHECK_PASS); \
1084 if (lessthan_ (middle_elem_, obj_)) \
1086 first_ = middle_; \
1087 ++first_; \
1088 len_ = len_ - half_ - 1; \
1090 else \
1091 len_ = half_; \
1093 return first_; \
1096 #define DEF_VEC_ALLOC_FUNC_O(T,A) \
1097 static inline VEC(T,A) *VEC_OP (T,A,alloc) \
1098 (int alloc_ MEM_STAT_DECL) \
1100 return (VEC(T,A) *) vec_##A##_o_reserve_exact (NULL, alloc_, \
1101 offsetof (VEC(T,A),base.vec), \
1102 sizeof (T) \
1103 PASS_MEM_STAT); \
1106 #define DEF_VEC_NONALLOC_FUNCS_O(T,A) \
1107 static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
1109 size_t len_ = vec_ ? vec_->prefix.num : 0; \
1110 VEC (T,A) *new_vec_ = NULL; \
1112 if (len_) \
1114 new_vec_ = (VEC (T,A) *)(vec_##A##_o_reserve_exact \
1115 (NULL, len_, \
1116 offsetof (VEC(T,A),base.vec), sizeof (T) \
1117 PASS_MEM_STAT)); \
1119 new_vec_->base.prefix.num = len_; \
1120 memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_); \
1122 return new_vec_; \
1125 static inline void VEC_OP (T,A,free) \
1126 (VEC(T,A) **vec_) \
1128 if (*vec_) \
1129 vec_##A##_free (*vec_); \
1130 *vec_ = NULL; \
1133 static inline int VEC_OP (T,A,reserve) \
1134 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1136 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1137 VEC_CHECK_PASS); \
1139 if (extend) \
1140 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve (*vec_, alloc_, \
1141 offsetof (VEC(T,A),base.vec),\
1142 sizeof (T) \
1143 PASS_MEM_STAT); \
1145 return extend; \
1148 static inline int VEC_OP (T,A,reserve_exact) \
1149 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1151 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1152 VEC_CHECK_PASS); \
1154 if (extend) \
1155 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve_exact \
1156 (*vec_, alloc_, \
1157 offsetof (VEC(T,A),base.vec), \
1158 sizeof (T) PASS_MEM_STAT); \
1160 return extend; \
1163 static inline void VEC_OP (T,A,safe_grow) \
1164 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1166 VEC_ASSERT (size_ >= 0 \
1167 && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
1168 "grow", T, A); \
1169 VEC_OP (T,A,reserve_exact) (vec_, \
1170 size_ - (int)(*vec_ ? VEC_BASE(*vec_)->prefix.num : 0) \
1171 VEC_CHECK_PASS PASS_MEM_STAT); \
1172 VEC_BASE (*vec_)->prefix.num = size_; \
1175 static inline void VEC_OP (T,A,safe_grow_cleared) \
1176 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1178 int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_); \
1179 VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT); \
1180 memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0, \
1181 sizeof (T) * (size_ - oldsize)); \
1184 static inline void VEC_OP(T,A,safe_splice) \
1185 (VEC(T,A) **dst_, VEC(T,base) *src_ VEC_CHECK_DECL MEM_STAT_DECL) \
1187 if (src_) \
1189 VEC_OP (T,A,reserve_exact) (dst_, src_->prefix.num \
1190 VEC_CHECK_PASS MEM_STAT_INFO); \
1192 VEC_OP (T,base,splice) (VEC_BASE (*dst_), src_ \
1193 VEC_CHECK_PASS); \
1197 static inline T *VEC_OP (T,A,safe_push) \
1198 (VEC(T,A) **vec_, const T *obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
1200 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1202 return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \
1205 static inline T *VEC_OP (T,A,safe_insert) \
1206 (VEC(T,A) **vec_, unsigned ix_, const T *obj_ \
1207 VEC_CHECK_DECL MEM_STAT_DECL) \
1209 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1211 return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_ \
1212 VEC_CHECK_PASS); \
1215 #define DEF_VEC_ALLOC_FUNC_I(T,A) \
1216 static inline VEC(T,A) *VEC_OP (T,A,alloc) \
1217 (int alloc_ MEM_STAT_DECL) \
1219 return (VEC(T,A) *) vec_##A##_o_reserve_exact \
1220 (NULL, alloc_, offsetof (VEC(T,A),base.vec), \
1221 sizeof (T) PASS_MEM_STAT); \
1224 #define DEF_VEC_NONALLOC_FUNCS_I(T,A) \
1225 static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
1227 size_t len_ = vec_ ? vec_->prefix.num : 0; \
1228 VEC (T,A) *new_vec_ = NULL; \
1230 if (len_) \
1232 new_vec_ = (VEC (T,A) *)(vec_##A##_o_reserve_exact \
1233 (NULL, len_, \
1234 offsetof (VEC(T,A),base.vec), sizeof (T) \
1235 PASS_MEM_STAT)); \
1237 new_vec_->base.prefix.num = len_; \
1238 memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_); \
1240 return new_vec_; \
1243 static inline void VEC_OP (T,A,free) \
1244 (VEC(T,A) **vec_) \
1246 if (*vec_) \
1247 vec_##A##_free (*vec_); \
1248 *vec_ = NULL; \
1251 static inline int VEC_OP (T,A,reserve) \
1252 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1254 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1255 VEC_CHECK_PASS); \
1257 if (extend) \
1258 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve (*vec_, alloc_, \
1259 offsetof (VEC(T,A),base.vec),\
1260 sizeof (T) \
1261 PASS_MEM_STAT); \
1263 return extend; \
1266 static inline int VEC_OP (T,A,reserve_exact) \
1267 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1269 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1270 VEC_CHECK_PASS); \
1272 if (extend) \
1273 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve_exact \
1274 (*vec_, alloc_, offsetof (VEC(T,A),base.vec), \
1275 sizeof (T) PASS_MEM_STAT); \
1277 return extend; \
1280 static inline void VEC_OP (T,A,safe_grow) \
1281 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1283 VEC_ASSERT (size_ >= 0 \
1284 && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
1285 "grow", T, A); \
1286 VEC_OP (T,A,reserve_exact) (vec_, \
1287 size_ - (int)(*vec_ ? VEC_BASE(*vec_)->prefix.num : 0) \
1288 VEC_CHECK_PASS PASS_MEM_STAT); \
1289 VEC_BASE (*vec_)->prefix.num = size_; \
1292 static inline void VEC_OP (T,A,safe_grow_cleared) \
1293 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1295 int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_); \
1296 VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT); \
1297 memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0, \
1298 sizeof (T) * (size_ - oldsize)); \
1301 static inline void VEC_OP(T,A,safe_splice) \
1302 (VEC(T,A) **dst_, VEC(T,base) *src_ VEC_CHECK_DECL MEM_STAT_DECL) \
1304 if (src_) \
1306 VEC_OP (T,A,reserve_exact) (dst_, src_->prefix.num \
1307 VEC_CHECK_PASS MEM_STAT_INFO); \
1309 VEC_OP (T,base,splice) (VEC_BASE (*dst_), src_ \
1310 VEC_CHECK_PASS); \
1314 static inline T *VEC_OP (T,A,safe_push) \
1315 (VEC(T,A) **vec_, const T obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
1317 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1319 return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \
1322 static inline T *VEC_OP (T,A,safe_insert) \
1323 (VEC(T,A) **vec_, unsigned ix_, const T obj_ \
1324 VEC_CHECK_DECL MEM_STAT_DECL) \
1326 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1328 return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_ \
1329 VEC_CHECK_PASS); \

/* We support a vector which starts out with space on the stack and
   switches to heap space when forced to reallocate.  This works a
   little differently.  Instead of DEF_VEC_ALLOC_P(TYPE, heap|gc), use
   DEF_VEC_ALLOC_P_STACK(TYPE).  This uses alloca to get the initial
   space; because alloca can not be usefully called in an inline
   function, and because a macro can not define a macro, you must then
   write a #define for each type:

   #define VEC_{TYPE}_stack_alloc(alloc)	\
     VEC_stack_alloc({TYPE}, alloc)

   This is really a hack and perhaps can be made better.  Note that
   this macro will wind up evaluating the ALLOC parameter twice.

   Only the initial allocation will be made using alloca, so pass a
   reasonable estimate that doesn't use too much stack space; don't
   pass zero.  Don't return a VEC(TYPE,stack) vector from the function
   which allocated it.  */

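/* An illustrative sketch of the declarations and a typical use (the
   element type 'tree' and the helper name are only examples, and a
   DEF_VEC_P(tree) definition is assumed to exist elsewhere):

     DEF_VEC_ALLOC_P_STACK(tree);
     #define VEC_tree_stack_alloc(alloc) VEC_stack_alloc (tree, alloc)

     ...
     VEC(tree,stack) *work = VEC_tree_stack_alloc (16);
     VEC_safe_push (tree, stack, work, decl);
     ...
     VEC_free (tree, stack, work);  */
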
1351 extern void *vec_stack_p_reserve (void *, int MEM_STAT_DECL);
1352 extern void *vec_stack_p_reserve_exact (void *, int MEM_STAT_DECL);
1353 extern void *vec_stack_p_reserve_exact_1 (int, void *);
1354 extern void *vec_stack_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
1355 extern void *vec_stack_o_reserve_exact (void *, int, size_t, size_t
1356 MEM_STAT_DECL);
1357 extern void vec_stack_free (void *);
1359 #ifdef GATHER_STATISTICS
1360 #define VEC_stack_alloc(T,alloc,name,line,function) \
1361 (VEC_OP (T,stack,alloc1) \
1362 (alloc, XALLOCAVAR (VEC(T,stack), VEC_embedded_size (T, alloc))))
1363 #else
1364 #define VEC_stack_alloc(T,alloc) \
1365 (VEC_OP (T,stack,alloc1) \
1366 (alloc, XALLOCAVAR (VEC(T,stack), VEC_embedded_size (T, alloc))))
1367 #endif
1369 #define DEF_VEC_ALLOC_P_STACK(T) \
1370 VEC_TA(T,base,stack); \
1371 DEF_VEC_ALLOC_FUNC_P_STACK(T) \
1372 DEF_VEC_NONALLOC_FUNCS_P(T,stack) \
1373 struct vec_swallow_trailing_semi
1375 #define DEF_VEC_ALLOC_FUNC_P_STACK(T) \
1376 static inline VEC(T,stack) *VEC_OP (T,stack,alloc1) \
1377 (int alloc_, VEC(T,stack)* space) \
1379 return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
1382 #define DEF_VEC_ALLOC_O_STACK(T) \
1383 VEC_TA(T,base,stack); \
1384 DEF_VEC_ALLOC_FUNC_O_STACK(T) \
1385 DEF_VEC_NONALLOC_FUNCS_O(T,stack) \
1386 struct vec_swallow_trailing_semi
1388 #define DEF_VEC_ALLOC_FUNC_O_STACK(T) \
1389 static inline VEC(T,stack) *VEC_OP (T,stack,alloc1) \
1390 (int alloc_, VEC(T,stack)* space) \
1392 return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
1395 #define DEF_VEC_ALLOC_I_STACK(T) \
1396 VEC_TA(T,base,stack); \
1397 DEF_VEC_ALLOC_FUNC_I_STACK(T) \
1398 DEF_VEC_NONALLOC_FUNCS_I(T,stack) \
1399 struct vec_swallow_trailing_semi
1401 #define DEF_VEC_ALLOC_FUNC_I_STACK(T) \
1402 static inline VEC(T,stack) *VEC_OP (T,stack,alloc1) \
1403 (int alloc_, VEC(T,stack)* space) \
1405 return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
1408 #endif /* GCC_VEC_H */