/* Vector API for GNU compiler.
   Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Nathan Sidwell <nathan@codesourcery.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_VEC_H
#define GCC_VEC_H

#include "statistics.h"		/* For MEM_STAT_DECL.  */

/* The macros here implement a set of templated vector types and
   associated interfaces.  These templates are implemented with
   macros, as we're not in C++ land.  The interface functions are
   typesafe and use static inline functions, sometimes backed by
   out-of-line generic functions.  The vectors are designed to
   interoperate with the GTY machinery.

   Because of the different behavior of structure objects, scalar
   objects and pointers, there are three flavors, one for each of
   these variants.  Both the structure object and pointer variants
   pass pointers to objects around -- in the former case the pointers
   are dereferenced and the objects copied into the vector, and in the
   latter case the pointers themselves are stored in the vector.  The
   scalar object variant is suitable for int-like objects, and the
   vector elements are returned by value.

   There are both 'index' and 'iterate' accessors.  The iterator
   returns a boolean iteration condition and updates the iteration
   variable passed by reference.  Because the iterator will be
   inlined, the address-of can be optimized away.

   The vectors are implemented using the trailing array idiom, thus
   they are not resizeable without changing the address of the vector
   object itself.  This means you cannot have variables or fields of
   vector type -- always use a pointer to a vector.  The one exception
   is the final field of a structure, which could be a vector type.
   You will have to use the embedded_size & embedded_init calls to
   create such objects, and they will probably not be resizeable (so
   don't use the 'safe' allocation variants).  The trailing array
   idiom is used (rather than a pointer to an array of data) because,
   if we allow NULL to also represent an empty vector, empty vectors
   occupy minimal space in the structure containing them.

   Each operation that increases the number of active elements is
   available in 'quick' and 'safe' variants.  The former presumes that
   there is sufficient allocated space for the operation to succeed
   (it dies if there is not).  The latter will reallocate the
   vector, if needed.  Reallocation causes an exponential increase in
   vector size.  If you know you will be adding N elements, it would
   be more efficient to use the reserve operation before adding the
   elements with the 'quick' operation.  This will ensure there are at
   least as many spare slots as you ask for; the allocation will grow
   exponentially if there are too few.  If you want to reserve a
   specific number of slots, but do not want the exponential increase
   (for instance, you know this is the last allocation), use the
   reserve_exact operation.  You can also create a vector of a
   specific size from the get-go.
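
   For instance, a minimal sketch of the reserve-then-quick pattern just
   described (this example is illustrative rather than part of the
   original documentation; it assumes DEF_VEC_I(int) and
   DEF_VEC_ALLOC_I(int,heap) at file scope, and 'n' and 'data' stand
   for caller-provided values):

     VEC(int,heap) *v = NULL;
     unsigned i;

     VEC_reserve (int, heap, v, n);        // grow once, with headroom
     for (i = 0; i < n; i++)
       VEC_quick_push (int, v, data[i]);   // no reallocation per push
     VEC_free (int, heap, v);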

   You should prefer the push and pop operations, as they append and
   remove from the end of the vector.  If you need to remove several
   items in one go, use the truncate operation.  The insert and remove
   operations allow you to change elements in the middle of the
   vector.  There are two remove operations, one which preserves the
   element ordering, 'ordered_remove', and one which does not,
   'unordered_remove'.  The latter function copies the end element
   into the removed slot, rather than invoking a memmove operation.
   The 'lower_bound' function will determine where to insert an item
   so that the vector remains in sorted order.
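
   For instance (an illustrative sketch, not part of the original text;
   'v' is any pointer vector and IX a valid index):

     t = VEC_ordered_remove (tree, v, ix);    // keeps order, O(N) memmove
     t = VEC_unordered_remove (tree, v, ix);  // last element fills the
                                              // hole, O(1)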

   When a vector type is defined, first a non-memory managed version
   is created.  You can then define either or both garbage collected
   and heap allocated versions.  The allocation mechanism is specified
   when the type is defined, and is therefore part of the type.  If
   you need both gc'd and heap allocated versions, you still must have
   *exactly* one definition of the common non-memory managed base vector.

   If you need to directly manipulate a vector, then the 'address'
   accessor will return the address of the start of the vector.  Also
   the 'space' predicate will tell you whether there is spare capacity
   in the vector.  You will not normally need to use these two functions.

   Vector types are defined using a DEF_VEC_{O,P,I}(TYPEDEF) macro, to
   get the non-memory-managed version, and then a
   DEF_VEC_ALLOC_{O,P,I}(TYPEDEF,ALLOC) macro to get memory managed
   vectors.  Variables of vector type are declared using a
   VEC(TYPEDEF,ALLOC) macro.  The ALLOC argument specifies the
   allocation strategy, and can be either 'gc' or 'heap' for garbage
   collected and heap allocated respectively.  It can be 'none' to get
   a vector that must be explicitly allocated (for instance as a
   trailing array of another structure).  The characters O, P and I
   indicate whether TYPEDEF is a pointer (P), object (O) or integral
   (I) type.  Be careful to pick the correct one, as you'll get an
   awkward and inefficient API if you use the wrong one.  There is a
   check, which results in a compile-time warning, for the P and I
   versions, but there is no check for the O versions, as that is not
   possible in plain C.  Due to the way GTY works, you must annotate
   any structures you wish to insert or reference from a vector with a
   GTY(()) tag.  You need to do this even if you never declare the GC
   allocated variants.

   An example of their use would be,

   DEF_VEC_P(tree);   // non-managed tree vector.
   DEF_VEC_ALLOC_P(tree,gc);	// gc'd vector of tree pointers.  This must
   			        // appear at file scope.

   struct my_struct {
     VEC(tree,gc) *v;      // A (pointer to) a vector of tree pointers.
   };

   struct my_struct *s;

   if (VEC_length(tree,s->v)) { we have some contents }
   VEC_safe_push(tree,gc,s->v,decl); // append some decl onto the end
   for (ix = 0; VEC_iterate(tree,s->v,ix,elt); ix++)
     { do something with elt }

*/

/* Macros to invoke API calls.  A single macro works for both pointer
   and object vectors, but the argument and return types might well be
   different.  In each macro, T is the typedef of the vector elements,
   and A is the allocation strategy.  The allocation strategy is only
   present when it is required.  Some of these macros pass the vector,
   V, by reference (by taking its address); this is noted in the
   descriptions.  */

/* Length of vector
   unsigned VEC_T_length(const VEC(T) *v);

   Return the number of active elements in V.  V can be NULL, in which
   case zero is returned.  */

#define VEC_length(T,V)	(VEC_OP(T,base,length)(VEC_BASE(V)))


/* Check if vector is empty
   int VEC_T_empty(const VEC(T) *v);

   Return nonzero if V is an empty vector (or V is NULL), zero otherwise.  */

#define VEC_empty(T,V)	(VEC_length (T,V) == 0)


/* Get the final element of the vector.
   T VEC_T_last(VEC(T) *v); // Integer
   T VEC_T_last(VEC(T) *v); // Pointer
   T *VEC_T_last(VEC(T) *v); // Object

   Return the final element.  V must not be empty.  */

#define VEC_last(T,V)	(VEC_OP(T,base,last)(VEC_BASE(V) VEC_CHECK_INFO))

/* Index into vector
   T VEC_T_index(VEC(T) *v, unsigned ix); // Integer
   T VEC_T_index(VEC(T) *v, unsigned ix); // Pointer
   T *VEC_T_index(VEC(T) *v, unsigned ix); // Object

   Return the IX'th element.  IX must be in the domain of V.  */

#define VEC_index(T,V,I) (VEC_OP(T,base,index)(VEC_BASE(V),I VEC_CHECK_INFO))

/* Iterate over vector
   int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Integer
   int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Pointer
   int VEC_T_iterate(VEC(T) *v, unsigned ix, T *&ptr); // Object

   Return iteration condition and update PTR to point to the IX'th
   element.  At the end of iteration, sets PTR to NULL.  Use this to
   iterate over the elements of a vector as follows,

     for (ix = 0; VEC_iterate(T,v,ix,ptr); ix++)
       continue;  */

#define VEC_iterate(T,V,I,P)	(VEC_OP(T,base,iterate)(VEC_BASE(V),I,&(P)))

/* Convenience macro for forward iteration.  */

#define FOR_EACH_VEC_ELT(T, V, I, P)		\
  for (I = 0; VEC_iterate (T, (V), (I), (P)); ++(I))

/* Convenience macro for reverse iteration.  */

#define FOR_EACH_VEC_ELT_REVERSE(T,V,I,P)	\
  for (I = VEC_length (T, (V)) - 1;		\
       VEC_iterate (T, (V), (I), (P));		\
       (I)--)
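
/* For example (an illustrative sketch, not part of the original
   documentation), iterating over a pointer vector of type
   VEC(tree,heap) *; 'mark_used' stands for any per-element action:

     unsigned ix;
     tree elt;

     FOR_EACH_VEC_ELT (tree, v, ix, elt)
       mark_used (elt);

   For an object vector the element variable would be a T * instead.  */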

/* Allocate new vector.
   VEC(T,A) *VEC_T_A_alloc(int reserve);

   Allocate a new vector with space for RESERVE objects.  If RESERVE
   is zero, NO vector is created.  */

#define VEC_alloc(T,A,N)	(VEC_OP(T,A,alloc)(N MEM_STAT_INFO))

/* Free a vector.
   void VEC_T_A_free(VEC(T,A) *&);

   Free a vector and set it to NULL.  */

#define VEC_free(T,A,V)	(VEC_OP(T,A,free)(&V))

/* Use these to determine the required size and initialization of a
   vector embedded within another structure (as the final member).

   size_t VEC_T_embedded_size(int reserve);
   void VEC_T_embedded_init(VEC(T) *v, int reserve);

   These allow the caller to perform the memory allocation.  */

#define VEC_embedded_size(T,N)	 (VEC_OP(T,base,embedded_size)(N))
#define VEC_embedded_init(T,O,N) (VEC_OP(T,base,embedded_init)(VEC_BASE(O),N))
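
/* For example (an illustrative sketch, not part of the original
   documentation), embedding a vector as the final member of a structure.
   It assumes DEF_VEC_I(int) at file scope; 'struct expr_list', 'n' and
   the use of xmalloc are hypothetical:

     struct expr_list
     {
       int kind;
       VEC(int,none) args;     // must be the final member
     };

     size_t sz = offsetof (struct expr_list, args)
		 + VEC_embedded_size (int, n);
     struct expr_list *p = (struct expr_list *) xmalloc (sz);
     VEC_embedded_init (int, &p->args, n);

   The embedded vector now has room for N elements; use only the 'quick'
   operations on it, since it cannot be reallocated.  */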

/* Copy a vector.
   VEC(T,A) *VEC_T_A_copy(VEC(T) *);

   Copy the live elements of a vector into a new vector.  The new and
   old vectors need not be allocated by the same mechanism.  */

#define VEC_copy(T,A,V) (VEC_OP(T,A,copy)(VEC_BASE(V) MEM_STAT_INFO))

/* Determine if a vector has additional capacity.

   int VEC_T_space (VEC(T) *v,int reserve)

   If V has space for RESERVE additional entries, return nonzero.  You
   usually only need to use this if you are doing your own vector
   reallocation, for instance on an embedded vector.  This returns
   nonzero in exactly the same circumstances that VEC_T_reserve
   will.  */

#define VEC_space(T,V,R) \
	(VEC_OP(T,base,space)(VEC_BASE(V),R VEC_CHECK_INFO))

/* Reserve space.
   int VEC_T_A_reserve(VEC(T,A) *&v, int reserve);

   Ensure that V has at least RESERVE slots available.  This will
   create additional headroom.  Note this can cause V to be
   reallocated.  Returns nonzero iff reallocation actually
   occurred.  */

#define VEC_reserve(T,A,V,R)	\
	(VEC_OP(T,A,reserve)(&(V),R VEC_CHECK_INFO MEM_STAT_INFO))

/* Reserve space exactly.
   int VEC_T_A_reserve_exact(VEC(T,A) *&v, int reserve);

   Ensure that V has at least RESERVE slots available.  This will not
   create additional headroom.  Note this can cause V to be
   reallocated.  Returns nonzero iff reallocation actually
   occurred.  */

#define VEC_reserve_exact(T,A,V,R)	\
	(VEC_OP(T,A,reserve_exact)(&(V),R VEC_CHECK_INFO MEM_STAT_INFO))
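
/* For example (an illustrative sketch, not part of the original
   documentation), when the final element count is known up front and no
   further growth is expected; 'count' and 'make_elt' are placeholders:

     VEC(tree,heap) *v = NULL;
     unsigned i;

     VEC_reserve_exact (tree, heap, v, count);   // exactly 'count' slots
     for (i = 0; i < count; i++)
       VEC_quick_push (tree, v, make_elt (i));  */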

/* Copy elements with no reallocation
   void VEC_T_splice (VEC(T) *dst, VEC(T) *src); // Integer
   void VEC_T_splice (VEC(T) *dst, VEC(T) *src); // Pointer
   void VEC_T_splice (VEC(T) *dst, VEC(T) *src); // Object

   Copy the elements in SRC to the end of DST as if by memcpy.  DST and
   SRC need not be allocated with the same mechanism, although they most
   often will be.  DST is assumed to have sufficient headroom
   available.  */

#define VEC_splice(T,DST,SRC)			\
  (VEC_OP(T,base,splice)(VEC_BASE(DST), VEC_BASE(SRC) VEC_CHECK_INFO))

/* Copy elements with reallocation
   void VEC_T_safe_splice (VEC(T,A) *&dst, VEC(T) *src); // Integer
   void VEC_T_safe_splice (VEC(T,A) *&dst, VEC(T) *src); // Pointer
   void VEC_T_safe_splice (VEC(T,A) *&dst, VEC(T) *src); // Object

   Copy the elements in SRC to the end of DST as if by memcpy.  DST and
   SRC need not be allocated with the same mechanism, although they most
   often will be.  DST need not have sufficient headroom and will be
   reallocated if needed.  */

#define VEC_safe_splice(T,A,DST,SRC)					\
  (VEC_OP(T,A,safe_splice)(&(DST), VEC_BASE(SRC) VEC_CHECK_INFO MEM_STAT_INFO))
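
/* For example (an illustrative sketch, not part of the original
   documentation), appending one heap vector onto another;
   'collect_chunk' is a hypothetical producer:

     VEC(tree,heap) *all = NULL;
     VEC(tree,heap) *chunk = collect_chunk ();

     VEC_safe_splice (tree, heap, all, chunk);
     VEC_free (tree, heap, chunk);  */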

/* Push object with no reallocation
   T *VEC_T_quick_push (VEC(T) *v, T obj); // Integer
   T *VEC_T_quick_push (VEC(T) *v, T obj); // Pointer
   T *VEC_T_quick_push (VEC(T) *v, T *obj); // Object

   Push a new element onto the end and return a pointer to the slot
   filled in.  For object vectors, the new value can be NULL, in which
   case NO initialization is performed.  There must
   be sufficient space in the vector.  */

#define VEC_quick_push(T,V,O)	\
	(VEC_OP(T,base,quick_push)(VEC_BASE(V),O VEC_CHECK_INFO))

/* Push object with reallocation
   T *VEC_T_A_safe_push (VEC(T,A) *&v, T obj); // Integer
   T *VEC_T_A_safe_push (VEC(T,A) *&v, T obj); // Pointer
   T *VEC_T_A_safe_push (VEC(T,A) *&v, T *obj); // Object

   Push a new element onto the end and return a pointer to the slot
   filled in.  For object vectors, the new value can be NULL, in which
   case NO initialization is performed.  Reallocates V, if needed.  */

#define VEC_safe_push(T,A,V,O)		\
	(VEC_OP(T,A,safe_push)(&(V),O VEC_CHECK_INFO MEM_STAT_INFO))

/* Pop element off end
   T VEC_T_pop (VEC(T) *v);		// Integer
   T VEC_T_pop (VEC(T) *v);		// Pointer
   void VEC_T_pop (VEC(T) *v);		// Object

   Pop the last element off the end.  Returns the element popped, for
   pointer vectors.  */

#define VEC_pop(T,V)	(VEC_OP(T,base,pop)(VEC_BASE(V) VEC_CHECK_INFO))
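
/* For example (an illustrative sketch, not part of the original
   documentation), the usual worklist idiom built from push, pop and
   empty; 'root' and 'process' are placeholders:

     VEC(tree,heap) *worklist = NULL;

     VEC_safe_push (tree, heap, worklist, root);
     while (!VEC_empty (tree, worklist))
       {
	 tree t = VEC_pop (tree, worklist);
	 process (t);
       }
     VEC_free (tree, heap, worklist);  */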

/* Truncate to specific length
   void VEC_T_truncate (VEC(T) *v, unsigned len);

   Set the length as specified.  The new length must be less than or
   equal to the current length.  This is an O(1) operation.  */

#define VEC_truncate(T,V,I)		\
	(VEC_OP(T,base,truncate)(VEC_BASE(V),I VEC_CHECK_INFO))

/* Grow to a specific length.
   void VEC_T_A_safe_grow (VEC(T,A) *&v, int len);

   Grow the vector to a specific length.  LEN must be greater than or
   equal to the current length.  The new elements are
   uninitialized.  */

#define VEC_safe_grow(T,A,V,I)		\
	(VEC_OP(T,A,safe_grow)(&(V),I VEC_CHECK_INFO MEM_STAT_INFO))

/* Grow to a specific length.
   void VEC_T_A_safe_grow_cleared (VEC(T,A) *&v, int len);

   Grow the vector to a specific length.  LEN must be greater than or
   equal to the current length.  The new elements are
   initialized to zero.  */

#define VEC_safe_grow_cleared(T,A,V,I)		\
	(VEC_OP(T,A,safe_grow_cleared)(&(V),I VEC_CHECK_INFO MEM_STAT_INFO))
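
/* For example (an illustrative sketch, not part of the original
   documentation), building a map indexed by a small integer key;
   'nkeys', 'key' and 'val' are placeholders:

     VEC(tree,heap) *map = NULL;

     VEC_safe_grow_cleared (tree, heap, map, nkeys);  // slots start as NULL
     VEC_replace (tree, map, key, val);
     tree cur = VEC_index (tree, map, key);  */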

/* Replace element
   T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Integer
   T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Pointer
   T *VEC_T_replace (VEC(T) *v, unsigned ix, T *val);  // Object

   Replace the IXth element of V with a new value, VAL.  For pointer
   vectors returns the original value.  For object vectors returns a
   pointer to the new value.  For object vectors the new value can be
   NULL, in which case no overwriting of the slot is actually
   performed.  */

#define VEC_replace(T,V,I,O)		\
	(VEC_OP(T,base,replace)(VEC_BASE(V),I,O VEC_CHECK_INFO))

/* Insert object with no reallocation
   T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Integer
   T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Pointer
   T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T *val); // Object

   Insert an element, VAL, at the IXth position of V.  Return a pointer
   to the slot created.  For object vectors, the new value can be
   NULL, in which case no initialization of the inserted slot takes
   place.  There must be sufficient space.  */

#define VEC_quick_insert(T,V,I,O)	\
	(VEC_OP(T,base,quick_insert)(VEC_BASE(V),I,O VEC_CHECK_INFO))

/* Insert object with reallocation
   T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Integer
   T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Pointer
   T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T *val); // Object

   Insert an element, VAL, at the IXth position of V.  Return a pointer
   to the slot created.  For object vectors, the new value can be
   NULL, in which case no initialization of the inserted slot takes
   place.  Reallocate V, if necessary.  */

#define VEC_safe_insert(T,A,V,I,O)	\
	(VEC_OP(T,A,safe_insert)(&(V),I,O VEC_CHECK_INFO MEM_STAT_INFO))

/* Remove element retaining order
   T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Integer
   T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Pointer
   void VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Object

   Remove an element from the IXth position of V.  Ordering of
   remaining elements is preserved.  For pointer vectors returns the
   removed object.  This is an O(N) operation due to a memmove.  */

#define VEC_ordered_remove(T,V,I)	\
	(VEC_OP(T,base,ordered_remove)(VEC_BASE(V),I VEC_CHECK_INFO))

/* Remove element destroying order
   T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Integer
   T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Pointer
   void VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Object

   Remove an element from the IXth position of V.  Ordering of
   remaining elements is destroyed.  For pointer vectors returns the
   removed object.  This is an O(1) operation.  */

#define VEC_unordered_remove(T,V,I)	\
	(VEC_OP(T,base,unordered_remove)(VEC_BASE(V),I VEC_CHECK_INFO))

/* Remove a block of elements
   void VEC_T_block_remove (VEC(T) *v, unsigned ix, unsigned len);

   Remove LEN elements starting at the IXth.  Ordering is retained.
   This is an O(N) operation due to memmove.  */

#define VEC_block_remove(T,V,I,L)	\
	(VEC_OP(T,base,block_remove)(VEC_BASE(V),I,L VEC_CHECK_INFO))

/* Get the address of the array of elements
   T *VEC_T_address (VEC(T) v)

   If you need to directly manipulate the array (for instance, you
   want to feed it to qsort), use this accessor.  */

#define VEC_address(T,V)		(VEC_OP(T,base,address)(VEC_BASE(V)))

/* Conveniently sort the contents of the vector with qsort.
   void VEC_qsort (VEC(T) *v, int (*cmp_func)(const void *, const void *))  */

#define VEC_qsort(T,V,CMP) qsort(VEC_address (T,V), VEC_length(T,V),	\
				 sizeof (T), CMP)
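
/* For example (an illustrative sketch, not part of the original
   documentation), sorting a VEC(int,heap) * named V:

     static int
     int_cmp (const void *a, const void *b)
     {
       return *(const int *) a - *(const int *) b;
     }

     VEC_qsort (int, v, int_cmp);  */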

/* Find the first index in the vector not less than the object.
   unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
                               bool (*lessthan) (const T, const T)); // Integer
   unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
                               bool (*lessthan) (const T, const T)); // Pointer
   unsigned VEC_T_lower_bound (VEC(T) *v, const T *val,
                               bool (*lessthan) (const T*, const T*)); // Object

   Find the first position in which VAL could be inserted without
   changing the ordering of V.  LESSTHAN is a function that returns
   true if the first argument is strictly less than the second.  */

#define VEC_lower_bound(T,V,O,LT)    \
       (VEC_OP(T,base,lower_bound)(VEC_BASE(V),O,LT VEC_CHECK_INFO))
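
/* For example (an illustrative sketch, not part of the original
   documentation), keeping a VEC(int,heap) * named V sorted while
   inserting a value X:

     static bool
     int_lt (const int a, const int b)
     {
       return a < b;
     }

     unsigned pos = VEC_lower_bound (int, v, x, int_lt);
     VEC_safe_insert (int, heap, v, pos, x);  */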

/* Reallocate an array of elements with prefix.  */
extern void *vec_gc_p_reserve (void *, int MEM_STAT_DECL);
extern void *vec_gc_p_reserve_exact (void *, int MEM_STAT_DECL);
extern void *vec_gc_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
extern void *vec_gc_o_reserve_exact (void *, int, size_t, size_t
				     MEM_STAT_DECL);
extern void ggc_free (void *);
#define vec_gc_free(V) ggc_free (V)
extern void *vec_heap_p_reserve (void *, int MEM_STAT_DECL);
extern void *vec_heap_p_reserve_exact (void *, int MEM_STAT_DECL);
extern void *vec_heap_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
extern void *vec_heap_o_reserve_exact (void *, int, size_t, size_t
				       MEM_STAT_DECL);
extern void dump_vec_loc_statistics (void);
#ifdef GATHER_STATISTICS
void vec_heap_free (void *);
#else
/* Avoid problems with frontends that #define free(x).  */
#define vec_heap_free(V) (free) (V)
#endif

#if ENABLE_CHECKING
#define VEC_CHECK_INFO ,__FILE__,__LINE__,__FUNCTION__
#define VEC_CHECK_DECL ,const char *file_,unsigned line_,const char *function_
#define VEC_CHECK_PASS ,file_,line_,function_

#define VEC_ASSERT(EXPR,OP,T,A) \
  (void)((EXPR) ? 0 : (VEC_ASSERT_FAIL(OP,VEC(T,A)), 0))

extern void vec_assert_fail (const char *, const char * VEC_CHECK_DECL)
     ATTRIBUTE_NORETURN;
#define VEC_ASSERT_FAIL(OP,VEC) vec_assert_fail (OP,#VEC VEC_CHECK_PASS)
#else
#define VEC_CHECK_INFO
#define VEC_CHECK_DECL
#define VEC_CHECK_PASS
#define VEC_ASSERT(EXPR,OP,T,A) (void)(EXPR)
#endif

/* Note: gengtype has hardwired knowledge of the expansions of the
   VEC, DEF_VEC_*, and DEF_VEC_ALLOC_* macros.  If you change the
   expansions of these macros you may need to change gengtype too.  */

typedef struct GTY(()) vec_prefix
{
  unsigned num;
  unsigned alloc;
} vec_prefix;

#define VEC(T,A) VEC_##T##_##A
#define VEC_OP(T,A,OP) VEC_##T##_##A##_##OP

/* Base of vector type, not user visible.  */
#define VEC_T(T,B)						\
typedef struct VEC(T,B)						\
{								\
  struct vec_prefix prefix;					\
  T vec[1];							\
} VEC(T,B)

#define VEC_T_GTY(T,B)						\
typedef struct GTY(()) VEC(T,B)					\
{								\
  struct vec_prefix prefix;					\
  T GTY ((length ("%h.prefix.num"))) vec[1];			\
} VEC(T,B)

/* Derived vector type, user visible.  */
#define VEC_TA_GTY(T,B,A,GTY)					\
typedef struct GTY VEC(T,A)					\
{								\
  VEC(T,B) B;							\
} VEC(T,A)

#define VEC_TA(T,B,A)						\
typedef struct VEC(T,A)						\
{								\
  VEC(T,B) B;							\
} VEC(T,A)

/* Convert to base type.  */
#define VEC_BASE(P)  ((P) ? &(P)->base : 0)
549 /* Vector of integer-like object. */
550 #define DEF_VEC_I(T) \
551 static inline void VEC_OP (T,must_be,integral_type) (void) \
557 VEC_TA(T,base,none); \
559 struct vec_swallow_trailing_semi
560 #define DEF_VEC_ALLOC_I(T,A) \
562 DEF_VEC_ALLOC_FUNC_I(T,A) \
563 DEF_VEC_NONALLOC_FUNCS_I(T,A) \
564 struct vec_swallow_trailing_semi
566 /* Vector of pointer to object. */
567 #define DEF_VEC_P(T) \
568 static inline void VEC_OP (T,must_be,pointer_type) (void) \
570 (void)((T)1 == (void *)1); \
574 VEC_TA(T,base,none); \
576 struct vec_swallow_trailing_semi
577 #define DEF_VEC_ALLOC_P(T,A) \
579 DEF_VEC_ALLOC_FUNC_P(T,A) \
580 DEF_VEC_NONALLOC_FUNCS_P(T,A) \
581 struct vec_swallow_trailing_semi
583 #define DEF_VEC_FUNC_P(T) \
584 static inline unsigned VEC_OP (T,base,length) (const VEC(T,base) *vec_) \
586 return vec_ ? vec_->prefix.num : 0; \
589 static inline T VEC_OP (T,base,last) \
590 (const VEC(T,base) *vec_ VEC_CHECK_DECL) \
592 VEC_ASSERT (vec_ && vec_->prefix.num, "last", T, base); \
594 return vec_->vec[vec_->prefix.num - 1]; \
597 static inline T VEC_OP (T,base,index) \
598 (const VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
600 VEC_ASSERT (vec_ && ix_ < vec_->prefix.num, "index", T, base); \
602 return vec_->vec[ix_]; \
605 static inline int VEC_OP (T,base,iterate) \
606 (const VEC(T,base) *vec_, unsigned ix_, T *ptr) \
608 if (vec_ && ix_ < vec_->prefix.num) \
610 *ptr = vec_->vec[ix_]; \
620 static inline size_t VEC_OP (T,base,embedded_size) \
623 return offsetof (VEC(T,base),vec) + alloc_ * sizeof(T); \
626 static inline void VEC_OP (T,base,embedded_init) \
627 (VEC(T,base) *vec_, int alloc_) \
629 vec_->prefix.num = 0; \
630 vec_->prefix.alloc = alloc_; \
633 static inline int VEC_OP (T,base,space) \
634 (VEC(T,base) *vec_, int alloc_ VEC_CHECK_DECL) \
636 VEC_ASSERT (alloc_ >= 0, "space", T, base); \
637 return vec_ ? vec_->prefix.alloc - vec_->prefix.num >= (unsigned)alloc_ : !alloc_; \
640 static inline void VEC_OP(T,base,splice) \
641 (VEC(T,base) *dst_, VEC(T,base) *src_ VEC_CHECK_DECL) \
645 unsigned len_ = src_->prefix.num; \
646 VEC_ASSERT (dst_->prefix.num + len_ <= dst_->prefix.alloc, "splice", T, base); \
648 memcpy (&dst_->vec[dst_->prefix.num], &src_->vec[0], len_ * sizeof (T)); \
649 dst_->prefix.num += len_; \
653 static inline T *VEC_OP (T,base,quick_push) \
654 (VEC(T,base) *vec_, T obj_ VEC_CHECK_DECL) \
658 VEC_ASSERT (vec_->prefix.num < vec_->prefix.alloc, "push", T, base); \
659 slot_ = &vec_->vec[vec_->prefix.num++]; \
665 static inline T VEC_OP (T,base,pop) (VEC(T,base) *vec_ VEC_CHECK_DECL) \
669 VEC_ASSERT (vec_->prefix.num, "pop", T, base); \
670 obj_ = vec_->vec[--vec_->prefix.num]; \
675 static inline void VEC_OP (T,base,truncate) \
676 (VEC(T,base) *vec_, unsigned size_ VEC_CHECK_DECL) \
678 VEC_ASSERT (vec_ ? vec_->prefix.num >= size_ : !size_, "truncate", T, base); \
680 vec_->prefix.num = size_; \
683 static inline T VEC_OP (T,base,replace) \
684 (VEC(T,base) *vec_, unsigned ix_, T obj_ VEC_CHECK_DECL) \
688 VEC_ASSERT (ix_ < vec_->prefix.num, "replace", T, base); \
689 old_obj_ = vec_->vec[ix_]; \
690 vec_->vec[ix_] = obj_; \
695 static inline T *VEC_OP (T,base,quick_insert) \
696 (VEC(T,base) *vec_, unsigned ix_, T obj_ VEC_CHECK_DECL) \
700 VEC_ASSERT (vec_->prefix.num < vec_->prefix.alloc, "insert", T, base); \
701 VEC_ASSERT (ix_ <= vec_->prefix.num, "insert", T, base); \
702 slot_ = &vec_->vec[ix_]; \
703 memmove (slot_ + 1, slot_, (vec_->prefix.num++ - ix_) * sizeof (T)); \
709 static inline T VEC_OP (T,base,ordered_remove) \
710 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
715 VEC_ASSERT (ix_ < vec_->prefix.num, "remove", T, base); \
716 slot_ = &vec_->vec[ix_]; \
718 memmove (slot_, slot_ + 1, (--vec_->prefix.num - ix_) * sizeof (T)); \
723 static inline T VEC_OP (T,base,unordered_remove) \
724 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
729 VEC_ASSERT (ix_ < vec_->prefix.num, "remove", T, base); \
730 slot_ = &vec_->vec[ix_]; \
732 *slot_ = vec_->vec[--vec_->prefix.num]; \
737 static inline void VEC_OP (T,base,block_remove) \
738 (VEC(T,base) *vec_, unsigned ix_, unsigned len_ VEC_CHECK_DECL) \
742 VEC_ASSERT (ix_ + len_ <= vec_->prefix.num, "block_remove", T, base); \
743 slot_ = &vec_->vec[ix_]; \
744 vec_->prefix.num -= len_; \
745 memmove (slot_, slot_ + len_, (vec_->prefix.num - ix_) * sizeof (T)); \
748 static inline T *VEC_OP (T,base,address) \
749 (VEC(T,base) *vec_) \
751 return vec_ ? vec_->vec : 0; \
754 static inline unsigned VEC_OP (T,base,lower_bound) \
755 (VEC(T,base) *vec_, const T obj_, \
756 bool (*lessthan_)(const T, const T) VEC_CHECK_DECL) \
758 unsigned int len_ = VEC_OP (T,base, length) (vec_); \
759 unsigned int half_, middle_; \
760 unsigned int first_ = 0; \
767 middle_elem_ = VEC_OP (T,base,index) (vec_, middle_ VEC_CHECK_PASS); \
768 if (lessthan_ (middle_elem_, obj_)) \
772 len_ = len_ - half_ - 1; \
780 #define DEF_VEC_ALLOC_FUNC_P(T,A) \
781 static inline VEC(T,A) *VEC_OP (T,A,alloc) \
782 (int alloc_ MEM_STAT_DECL) \
784 return (VEC(T,A) *) vec_##A##_p_reserve_exact (NULL, alloc_ \
789 #define DEF_VEC_NONALLOC_FUNCS_P(T,A) \
790 static inline void VEC_OP (T,A,free) \
794 vec_##A##_free (*vec_); \
798 static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
800 size_t len_ = vec_ ? vec_->prefix.num : 0; \
801 VEC (T,A) *new_vec_ = NULL; \
805 new_vec_ = (VEC (T,A) *)(vec_##A##_p_reserve_exact \
806 (NULL, len_ PASS_MEM_STAT)); \
808 new_vec_->base.prefix.num = len_; \
809 memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_); \
814 static inline int VEC_OP (T,A,reserve) \
815 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
817 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
821 *vec_ = (VEC(T,A) *) vec_##A##_p_reserve (*vec_, alloc_ PASS_MEM_STAT); \
826 static inline int VEC_OP (T,A,reserve_exact) \
827 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
829 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
833 *vec_ = (VEC(T,A) *) vec_##A##_p_reserve_exact (*vec_, alloc_ \
839 static inline void VEC_OP (T,A,safe_grow) \
840 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
842 VEC_ASSERT (size_ >= 0 \
843 && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
845 VEC_OP (T,A,reserve_exact) (vec_, \
846 size_ - (int)(*vec_ ? VEC_BASE(*vec_)->prefix.num : 0) \
847 VEC_CHECK_PASS PASS_MEM_STAT); \
848 VEC_BASE (*vec_)->prefix.num = size_; \
851 static inline void VEC_OP (T,A,safe_grow_cleared) \
852 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
854 int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_); \
855 VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT); \
856 memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0, \
857 sizeof (T) * (size_ - oldsize)); \
860 static inline void VEC_OP(T,A,safe_splice) \
861 (VEC(T,A) **dst_, VEC(T,base) *src_ VEC_CHECK_DECL MEM_STAT_DECL) \
865 VEC_OP (T,A,reserve_exact) (dst_, src_->prefix.num \
866 VEC_CHECK_PASS MEM_STAT_INFO); \
868 VEC_OP (T,base,splice) (VEC_BASE (*dst_), src_ \
873 static inline T *VEC_OP (T,A,safe_push) \
874 (VEC(T,A) **vec_, T obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
876 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
878 return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \
881 static inline T *VEC_OP (T,A,safe_insert) \
882 (VEC(T,A) **vec_, unsigned ix_, T obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
884 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
886 return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_ \
890 /* Vector of object. */
891 #define DEF_VEC_O(T) \
893 VEC_TA(T,base,none); \
895 struct vec_swallow_trailing_semi
896 #define DEF_VEC_ALLOC_O(T,A) \
898 DEF_VEC_ALLOC_FUNC_O(T,A) \
899 DEF_VEC_NONALLOC_FUNCS_O(T,A) \
900 struct vec_swallow_trailing_semi
902 #define DEF_VEC_FUNC_O(T) \
903 static inline unsigned VEC_OP (T,base,length) (const VEC(T,base) *vec_) \
905 return vec_ ? vec_->prefix.num : 0; \
908 static inline T *VEC_OP (T,base,last) (VEC(T,base) *vec_ VEC_CHECK_DECL) \
910 VEC_ASSERT (vec_ && vec_->prefix.num, "last", T, base); \
912 return &vec_->vec[vec_->prefix.num - 1]; \
915 static inline T *VEC_OP (T,base,index) \
916 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
918 VEC_ASSERT (vec_ && ix_ < vec_->prefix.num, "index", T, base); \
920 return &vec_->vec[ix_]; \
923 static inline int VEC_OP (T,base,iterate) \
924 (VEC(T,base) *vec_, unsigned ix_, T **ptr) \
926 if (vec_ && ix_ < vec_->prefix.num) \
928 *ptr = &vec_->vec[ix_]; \
938 static inline size_t VEC_OP (T,base,embedded_size) \
941 return offsetof (VEC(T,base),vec) + alloc_ * sizeof(T); \
944 static inline void VEC_OP (T,base,embedded_init) \
945 (VEC(T,base) *vec_, int alloc_) \
947 vec_->prefix.num = 0; \
948 vec_->prefix.alloc = alloc_; \
951 static inline int VEC_OP (T,base,space) \
952 (VEC(T,base) *vec_, int alloc_ VEC_CHECK_DECL) \
954 VEC_ASSERT (alloc_ >= 0, "space", T, base); \
955 return vec_ ? vec_->prefix.alloc - vec_->prefix.num >= (unsigned)alloc_ : !alloc_; \
958 static inline void VEC_OP(T,base,splice) \
959 (VEC(T,base) *dst_, VEC(T,base) *src_ VEC_CHECK_DECL) \
963 unsigned len_ = src_->prefix.num; \
964 VEC_ASSERT (dst_->prefix.num + len_ <= dst_->prefix.alloc, "splice", T, base); \
966 memcpy (&dst_->vec[dst_->prefix.num], &src_->vec[0], len_ * sizeof (T)); \
967 dst_->prefix.num += len_; \
971 static inline T *VEC_OP (T,base,quick_push) \
972 (VEC(T,base) *vec_, const T *obj_ VEC_CHECK_DECL) \
976 VEC_ASSERT (vec_->prefix.num < vec_->prefix.alloc, "push", T, base); \
977 slot_ = &vec_->vec[vec_->prefix.num++]; \
984 static inline void VEC_OP (T,base,pop) (VEC(T,base) *vec_ VEC_CHECK_DECL) \
986 VEC_ASSERT (vec_->prefix.num, "pop", T, base); \
987 --vec_->prefix.num; \
990 static inline void VEC_OP (T,base,truncate) \
991 (VEC(T,base) *vec_, unsigned size_ VEC_CHECK_DECL) \
993 VEC_ASSERT (vec_ ? vec_->prefix.num >= size_ : !size_, "truncate", T, base); \
995 vec_->prefix.num = size_; \
998 static inline T *VEC_OP (T,base,replace) \
999 (VEC(T,base) *vec_, unsigned ix_, const T *obj_ VEC_CHECK_DECL) \
1003 VEC_ASSERT (ix_ < vec_->prefix.num, "replace", T, base); \
1004 slot_ = &vec_->vec[ix_]; \
1011 static inline T *VEC_OP (T,base,quick_insert) \
1012 (VEC(T,base) *vec_, unsigned ix_, const T *obj_ VEC_CHECK_DECL) \
1016 VEC_ASSERT (vec_->prefix.num < vec_->prefix.alloc, "insert", T, base); \
1017 VEC_ASSERT (ix_ <= vec_->prefix.num, "insert", T, base); \
1018 slot_ = &vec_->vec[ix_]; \
1019 memmove (slot_ + 1, slot_, (vec_->prefix.num++ - ix_) * sizeof (T)); \
1026 static inline void VEC_OP (T,base,ordered_remove) \
1027 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
1031 VEC_ASSERT (ix_ < vec_->prefix.num, "remove", T, base); \
1032 slot_ = &vec_->vec[ix_]; \
1033 memmove (slot_, slot_ + 1, (--vec_->prefix.num - ix_) * sizeof (T)); \
1036 static inline void VEC_OP (T,base,unordered_remove) \
1037 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \
1039 VEC_ASSERT (ix_ < vec_->prefix.num, "remove", T, base); \
1040 vec_->vec[ix_] = vec_->vec[--vec_->prefix.num]; \
1043 static inline void VEC_OP (T,base,block_remove) \
1044 (VEC(T,base) *vec_, unsigned ix_, unsigned len_ VEC_CHECK_DECL) \
1048 VEC_ASSERT (ix_ + len_ <= vec_->prefix.num, "block_remove", T, base); \
1049 slot_ = &vec_->vec[ix_]; \
1050 vec_->prefix.num -= len_; \
1051 memmove (slot_, slot_ + len_, (vec_->prefix.num - ix_) * sizeof (T)); \
1054 static inline T *VEC_OP (T,base,address) \
1055 (VEC(T,base) *vec_) \
1057 return vec_ ? vec_->vec : 0; \
1060 static inline unsigned VEC_OP (T,base,lower_bound) \
1061 (VEC(T,base) *vec_, const T *obj_, \
1062 bool (*lessthan_)(const T *, const T *) VEC_CHECK_DECL) \
1064 unsigned int len_ = VEC_OP (T, base, length) (vec_); \
1065 unsigned int half_, middle_; \
1066 unsigned int first_ = 0; \
1070 half_ = len_ >> 1; \
1073 middle_elem_ = VEC_OP (T,base,index) (vec_, middle_ VEC_CHECK_PASS); \
1074 if (lessthan_ (middle_elem_, obj_)) \
1078 len_ = len_ - half_ - 1; \
1086 #define DEF_VEC_ALLOC_FUNC_O(T,A) \
1087 static inline VEC(T,A) *VEC_OP (T,A,alloc) \
1088 (int alloc_ MEM_STAT_DECL) \
1090 return (VEC(T,A) *) vec_##A##_o_reserve_exact (NULL, alloc_, \
1091 offsetof (VEC(T,A),base.vec), \
1096 #define DEF_VEC_NONALLOC_FUNCS_O(T,A) \
1097 static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
1099 size_t len_ = vec_ ? vec_->prefix.num : 0; \
1100 VEC (T,A) *new_vec_ = NULL; \
1104 new_vec_ = (VEC (T,A) *)(vec_##A##_o_reserve_exact \
1106 offsetof (VEC(T,A),base.vec), sizeof (T) \
1109 new_vec_->base.prefix.num = len_; \
1110 memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_); \
1115 static inline void VEC_OP (T,A,free) \
1119 vec_##A##_free (*vec_); \
1123 static inline int VEC_OP (T,A,reserve) \
1124 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1126 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1130 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve (*vec_, alloc_, \
1131 offsetof (VEC(T,A),base.vec),\
1138 static inline int VEC_OP (T,A,reserve_exact) \
1139 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1141 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1145 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve_exact \
1147 offsetof (VEC(T,A),base.vec), \
1148 sizeof (T) PASS_MEM_STAT); \
1153 static inline void VEC_OP (T,A,safe_grow) \
1154 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1156 VEC_ASSERT (size_ >= 0 \
1157 && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
1159 VEC_OP (T,A,reserve_exact) (vec_, \
1160 size_ - (int)(*vec_ ? VEC_BASE(*vec_)->prefix.num : 0) \
1161 VEC_CHECK_PASS PASS_MEM_STAT); \
1162 VEC_BASE (*vec_)->prefix.num = size_; \
1165 static inline void VEC_OP (T,A,safe_grow_cleared) \
1166 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1168 int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_); \
1169 VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT); \
1170 memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0, \
1171 sizeof (T) * (size_ - oldsize)); \
1174 static inline void VEC_OP(T,A,safe_splice) \
1175 (VEC(T,A) **dst_, VEC(T,base) *src_ VEC_CHECK_DECL MEM_STAT_DECL) \
1179 VEC_OP (T,A,reserve_exact) (dst_, src_->prefix.num \
1180 VEC_CHECK_PASS MEM_STAT_INFO); \
1182 VEC_OP (T,base,splice) (VEC_BASE (*dst_), src_ \
1187 static inline T *VEC_OP (T,A,safe_push) \
1188 (VEC(T,A) **vec_, const T *obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
1190 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1192 return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \
1195 static inline T *VEC_OP (T,A,safe_insert) \
1196 (VEC(T,A) **vec_, unsigned ix_, const T *obj_ \
1197 VEC_CHECK_DECL MEM_STAT_DECL) \
1199 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1201 return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_ \
1205 #define DEF_VEC_ALLOC_FUNC_I(T,A) \
1206 static inline VEC(T,A) *VEC_OP (T,A,alloc) \
1207 (int alloc_ MEM_STAT_DECL) \
1209 return (VEC(T,A) *) vec_##A##_o_reserve_exact \
1210 (NULL, alloc_, offsetof (VEC(T,A),base.vec), \
1211 sizeof (T) PASS_MEM_STAT); \
1214 #define DEF_VEC_NONALLOC_FUNCS_I(T,A) \
1215 static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
1217 size_t len_ = vec_ ? vec_->prefix.num : 0; \
1218 VEC (T,A) *new_vec_ = NULL; \
1222 new_vec_ = (VEC (T,A) *)(vec_##A##_o_reserve_exact \
1224 offsetof (VEC(T,A),base.vec), sizeof (T) \
1227 new_vec_->base.prefix.num = len_; \
1228 memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_); \
1233 static inline void VEC_OP (T,A,free) \
1237 vec_##A##_free (*vec_); \
1241 static inline int VEC_OP (T,A,reserve) \
1242 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1244 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1248 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve (*vec_, alloc_, \
1249 offsetof (VEC(T,A),base.vec),\
1256 static inline int VEC_OP (T,A,reserve_exact) \
1257 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \
1259 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \
1263 *vec_ = (VEC(T,A) *) vec_##A##_o_reserve_exact \
1264 (*vec_, alloc_, offsetof (VEC(T,A),base.vec), \
1265 sizeof (T) PASS_MEM_STAT); \
1270 static inline void VEC_OP (T,A,safe_grow) \
1271 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1273 VEC_ASSERT (size_ >= 0 \
1274 && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
1276 VEC_OP (T,A,reserve_exact) (vec_, \
1277 size_ - (int)(*vec_ ? VEC_BASE(*vec_)->prefix.num : 0) \
1278 VEC_CHECK_PASS PASS_MEM_STAT); \
1279 VEC_BASE (*vec_)->prefix.num = size_; \
1282 static inline void VEC_OP (T,A,safe_grow_cleared) \
1283 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \
1285 int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_); \
1286 VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT); \
1287 memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0, \
1288 sizeof (T) * (size_ - oldsize)); \
1291 static inline void VEC_OP(T,A,safe_splice) \
1292 (VEC(T,A) **dst_, VEC(T,base) *src_ VEC_CHECK_DECL MEM_STAT_DECL) \
1296 VEC_OP (T,A,reserve_exact) (dst_, src_->prefix.num \
1297 VEC_CHECK_PASS MEM_STAT_INFO); \
1299 VEC_OP (T,base,splice) (VEC_BASE (*dst_), src_ \
1304 static inline T *VEC_OP (T,A,safe_push) \
1305 (VEC(T,A) **vec_, const T obj_ VEC_CHECK_DECL MEM_STAT_DECL) \
1307 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1309 return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \
1312 static inline T *VEC_OP (T,A,safe_insert) \
1313 (VEC(T,A) **vec_, unsigned ix_, const T obj_ \
1314 VEC_CHECK_DECL MEM_STAT_DECL) \
1316 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \
1318 return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_ \

/* We support a vector which starts out with space on the stack and
   switches to heap space when forced to reallocate.  This works a
   little differently.  Instead of DEF_VEC_ALLOC_P(TYPE, heap|gc), use
   DEF_VEC_ALLOC_P_STACK(TYPE).  This uses alloca to get the initial
   space; because alloca cannot be usefully called in an inline
   function, and because a macro cannot define a macro, you must then
   write a #define for each type:

   #define VEC_{TYPE}_stack_alloc(alloc) \
     VEC_stack_alloc({TYPE}, alloc)

   This is really a hack and perhaps can be made better.  Note that
   this macro will wind up evaluating the ALLOC parameter twice.

   Only the initial allocation will be made using alloca, so pass a
   reasonable estimate that doesn't use too much stack space; don't
   pass zero.  Don't return a VEC(TYPE,stack) vector from the function
   which allocated it.  */
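
/* For example (an illustrative sketch, not part of the original
   documentation), a stack-seeded vector of trees; it assumes
   DEF_VEC_P(tree) is already in scope and 'decl' is a placeholder:

     DEF_VEC_ALLOC_P_STACK(tree);
     #define VEC_tree_stack_alloc(alloc) VEC_stack_alloc (tree, alloc)

     VEC(tree,stack) *scratch = VEC_alloc (tree, stack, 16);
     VEC_safe_push (tree, stack, scratch, decl);   // may move to the heap
     VEC_free (tree, stack, scratch);  */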

extern void *vec_stack_p_reserve (void *, int MEM_STAT_DECL);
extern void *vec_stack_p_reserve_exact (void *, int MEM_STAT_DECL);
extern void *vec_stack_p_reserve_exact_1 (int, void *);
extern void *vec_stack_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
extern void *vec_stack_o_reserve_exact (void *, int, size_t, size_t
					MEM_STAT_DECL);
extern void vec_stack_free (void *);

#ifdef GATHER_STATISTICS
#define VEC_stack_alloc(T,alloc,name,line,function)			\
  (VEC_OP (T,stack,alloc1)						\
   (alloc, XALLOCAVAR (VEC(T,stack), VEC_embedded_size (T, alloc))))
#else
#define VEC_stack_alloc(T,alloc)					\
  (VEC_OP (T,stack,alloc1)						\
   (alloc, XALLOCAVAR (VEC(T,stack), VEC_embedded_size (T, alloc))))
#endif
1359 #define DEF_VEC_ALLOC_P_STACK(T) \
1360 VEC_TA(T,base,stack); \
1361 DEF_VEC_ALLOC_FUNC_P_STACK(T) \
1362 DEF_VEC_NONALLOC_FUNCS_P(T,stack) \
1363 struct vec_swallow_trailing_semi
1365 #define DEF_VEC_ALLOC_FUNC_P_STACK(T) \
1366 static inline VEC(T,stack) *VEC_OP (T,stack,alloc1) \
1367 (int alloc_, VEC(T,stack)* space) \
1369 return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
1372 #define DEF_VEC_ALLOC_O_STACK(T) \
1373 VEC_TA(T,base,stack); \
1374 DEF_VEC_ALLOC_FUNC_O_STACK(T) \
1375 DEF_VEC_NONALLOC_FUNCS_O(T,stack) \
1376 struct vec_swallow_trailing_semi
1378 #define DEF_VEC_ALLOC_FUNC_O_STACK(T) \
1379 static inline VEC(T,stack) *VEC_OP (T,stack,alloc1) \
1380 (int alloc_, VEC(T,stack)* space) \
1382 return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
1385 #define DEF_VEC_ALLOC_I_STACK(T) \
1386 VEC_TA(T,base,stack); \
1387 DEF_VEC_ALLOC_FUNC_I_STACK(T) \
1388 DEF_VEC_NONALLOC_FUNCS_I(T,stack) \
1389 struct vec_swallow_trailing_semi
1391 #define DEF_VEC_ALLOC_FUNC_I_STACK(T) \
1392 static inline VEC(T,stack) *VEC_OP (T,stack,alloc1) \
1393 (int alloc_, VEC(T,stack)* space) \
1395 return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
1398 #endif /* GCC_VEC_H */