gdb/common/vec.h
1 /* Vector API for GDB.
2    Copyright (C) 2004-2019 Free Software Foundation, Inc.
3    Contributed by Nathan Sidwell <nathan@codesourcery.com>
4
5    This file is part of GDB.
6
7    This program is free software; you can redistribute it and/or modify
8    it under the terms of the GNU General Public License as published by
9    the Free Software Foundation; either version 3 of the License, or
10    (at your option) any later version.
11
12    This program is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15    GNU General Public License for more details.
16
17    You should have received a copy of the GNU General Public License
18    along with this program.  If not, see <http://www.gnu.org/licenses/>.  */
19
20 #ifndef COMMON_VEC_H
21 #define COMMON_VEC_H
22
23 #include "diagnostics.h"
24
25 /* clang has a bug that makes it warn (-Wunused-function) about unused functions
26    that are the result of the DEF_VEC_* macro expansion.  See:
27
28      https://bugs.llvm.org/show_bug.cgi?id=22712
29
30    We specifically ignore this warning for the vec functions when the compiler
31    is clang.  */
32 #ifdef __clang__
33 # define DIAGNOSTIC_IGNORE_UNUSED_VEC_FUNCTION \
34     DIAGNOSTIC_IGNORE_UNUSED_FUNCTION
35 #else
36 # define DIAGNOSTIC_IGNORE_UNUSED_VEC_FUNCTION
37 #endif
38
39 /* The macros here implement a set of templated vector types and
40    associated interfaces.  These templates are implemented with
41    macros, as we're not in C++ land.  The interface functions are
42    typesafe and use static inline functions, sometimes backed by
43    out-of-line generic functions.
44
45    Because of the different behavior of structure objects, scalar
46    objects and of pointers, there are three flavors, one for each of
47    these variants.  Both the structure object and pointer variants
48    pass pointers to objects around -- in the former case the pointers
49    are dereferenced and the objects copied into the vector, and in the
50    latter case the pointers themselves are stored.  The scalar
51    object variant is suitable for int-like objects, and the vector
52    elements are returned by value.
53
54    There are both 'index' and 'iterate' accessors.  The iterator
55    returns a boolean iteration condition and updates the iteration
56    variable passed by reference.  Because the iterator will be
57    inlined, the address-of can be optimized away.
58
59    The vectors are implemented using the trailing array idiom, thus
60    they are not resizeable without changing the address of the vector
61    object itself.  This means you cannot have variables or fields of
62    vector type -- always use a pointer to a vector.  The one exception
63    is the final field of a structure, which could be a vector type.
64    You will have to use the embedded_size & embedded_init calls to
65    create such objects, and they will probably not be resizeable (so
66    don't use the 'safe' allocation variants).  The trailing array
67    idiom is used (rather than a pointer to an array of data), because,
68    if we allow NULL to also represent an empty vector, empty vectors
69    occupy minimal space in the structure containing them.
70
71    Each operation that increases the number of active elements is
72    available in 'quick' and 'safe' variants.  The former presumes that
73    there is sufficient allocated space for the operation to succeed
74    (it dies if there is not).  The latter will reallocate the
75    vector, if needed.  Reallocation causes an exponential increase in
76    vector size.  If you know you will be adding N elements, it would
77    be more efficient to use the reserve operation before adding the
78    elements with the 'quick' operation.  This will ensure there are at
79    least as many elements as you ask for; the allocation will grow
80    exponentially if there are too few spare slots.  If you want to reserve a
81    specific number of slots, but do not want the exponential increase
82    (for instance, you know this is the last allocation), use a
83    negative number for reservation.  You can also create a vector of a
84    specific size from the get go.
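
   For instance, assuming a vector V of T elements and an array
   NEW_ELEMS of N_NEW values to append (illustrative names only),
   reserving up front lets the additions use the 'quick' variant,

     VEC_reserve (T, v, n_new);
     for (ix = 0; ix < n_new; ix++)
       VEC_quick_push (T, v, new_elems[ix]);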
85
86    You should prefer the push and pop operations, as they append and
87    remove from the end of the vector.  If you need to remove several
88    items in one go, use the truncate operation.  The insert and remove
89    operations allow you to change elements in the middle of the
90    vector.  There are two remove operations, one which preserves the
91    element ordering 'ordered_remove', and one which does not
92    'unordered_remove'.  The latter function copies the end element
93    into the removed slot, rather than invoking a memmove operation.  The
94    'lower_bound' function will determine where to insert an item in the
95    array so that sorted order is maintained.
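
   For instance, to insert OBJ into a vector kept in sorted order,
   where T_LESSTHAN is an illustrative predicate returning nonzero
   when its first argument sorts before its second,

     ix = VEC_lower_bound (T, v, obj, T_lessthan);
     VEC_safe_insert (T, v, ix, obj);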
96
97    If you need to directly manipulate a vector, then the 'address'
98    accessor will return the address of the start of the vector.  Also
99    the 'space' predicate will tell you whether there is spare capacity
100    in the vector.  You will not normally need to use these two functions.
101
102    Vector types are defined using a DEF_VEC_{O,P,I}(TYPEDEF) macro.
103    Variables of vector type are declared using a VEC(TYPEDEF) macro.
104    The characters O, P and I indicate whether TYPEDEF is a pointer
105    (P), object (O) or integral (I) type.  Be careful to pick the
106    correct one, as you'll get an awkward and inefficient API if you
107    use the wrong one.  There is a check, which results in a
108    compile-time warning, for the P and I versions, but there is no
109    check for the O versions, as that is not possible in plain C.
110
111    An example of their use would be,
112
113    DEF_VEC_P(tree);   // non-managed tree vector.
114
115    struct my_struct {
116      VEC(tree) *v;      // A (pointer to) a vector of tree pointers.
117    };
118
119    struct my_struct *s;
120
121    if (VEC_length(tree, s->v)) { we have some contents }
122    VEC_safe_push(tree, s->v, decl); // append some decl onto the end
123    for (ix = 0; VEC_iterate(tree, s->v, ix, elt); ix++)
124      { do something with elt }
125
126 */
127
128 /* Macros to invoke API calls.  A single macro works for both pointer
129    and object vectors, but the argument and return types might well be
130    different.  In each macro, T is the typedef of the vector elements.
131    Some of these macros pass the vector, V, by reference (by taking
132    its address), this is noted in the descriptions.  */
133
134 /* Length of vector
135    unsigned VEC_T_length(const VEC(T) *v);
136
137    Return the number of active elements in V.  V can be NULL, in which
138    case zero is returned.  */
139
140 #define VEC_length(T,V) (VEC_OP(T,length)(V))
141
142
143 /* Check if vector is empty
144    int VEC_T_empty(const VEC(T) *v);
145
146    Return nonzero if V is an empty vector (or V is NULL), zero otherwise.  */
147
148 #define VEC_empty(T,V)  (VEC_length (T,V) == 0)
149
150
151 /* Get the final element of the vector.
152    T VEC_T_last(VEC(T) *v); // Integer
153    T VEC_T_last(VEC(T) *v); // Pointer
154    T *VEC_T_last(VEC(T) *v); // Object
155
156    Return the final element.  V must not be empty.  */
157
158 #define VEC_last(T,V)   (VEC_OP(T,last)(V VEC_ASSERT_INFO))
159
160 /* Index into vector
161    T VEC_T_index(VEC(T) *v, unsigned ix); // Integer
162    T VEC_T_index(VEC(T) *v, unsigned ix); // Pointer
163    T *VEC_T_index(VEC(T) *v, unsigned ix); // Object
164
165    Return the IX'th element.  IX must be in the domain of V.  */
166
167 #define VEC_index(T,V,I) (VEC_OP(T,index)(V,I VEC_ASSERT_INFO))
168
169 /* Iterate over vector
170    int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Integer
171    int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Pointer
172    int VEC_T_iterate(VEC(T) *v, unsigned ix, T *&ptr); // Object
173
174    Return iteration condition and update PTR to point to the IX'th
175    element.  At the end of iteration, sets PTR to NULL.  Use this to
176    iterate over the elements of a vector as follows,
177
178      for (ix = 0; VEC_iterate(T,v,ix,ptr); ix++)
179        continue;  */
180
181 #define VEC_iterate(T,V,I,P)    (VEC_OP(T,iterate)(V,I,&(P)))
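
/* For instance, with an object vector (DEF_VEC_O), PTR is a pointer to
   the element in place rather than a copy of it.  Here my_elt_t, v and
   field are illustrative names only,

     my_elt_t *ep;
     for (ix = 0; VEC_iterate (my_elt_t, v, ix, ep); ix++)
       ep->field = 0;  */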
182
183 /* Allocate new vector.
184    VEC(T,A) *VEC_T_alloc(int reserve);
185
186    Allocate a new vector with space for RESERVE objects.  If RESERVE
187    is zero, NO vector is created.  */
188
189 #define VEC_alloc(T,N)  (VEC_OP(T,alloc)(N))
190
191 /* Free a vector.
192    void VEC_T_free(VEC(T,A) *&);
193
194    Free a vector and set it to NULL.  */
195
196 #define VEC_free(T,V)   (VEC_OP(T,free)(&V))
197
198 /* A cleanup function for a vector.
199    void VEC_T_cleanup(void *);
200    
201    Clean up a vector.  */
202
203 #define VEC_cleanup(T)  (VEC_OP(T,cleanup))
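
/* For instance, assuming this tree still provides the make_cleanup /
   do_cleanups machinery (an assumption, not something defined by this
   header), a vector can be released automatically,

     VEC(T) *v = NULL;
     struct cleanup *back_to = make_cleanup (VEC_cleanup (T), &v);
     ... use V ...
     do_cleanups (back_to);   // frees V and sets it back to NULL  */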
204
205 /* Use these to determine the required size and initialization of a
206    vector embedded within another structure (as the final member).
207
208    size_t VEC_T_embedded_size(int reserve);
209    void VEC_T_embedded_init(VEC(T) *v, int reserve);
210
211    These allow the caller to perform the memory allocation.  */
212
213 #define VEC_embedded_size(T,N)   (VEC_OP(T,embedded_size)(N))
214 #define VEC_embedded_init(T,O,N) (VEC_OP(T,embedded_init)(VEC_BASE(O),N))
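
/* For example, assuming an illustrative structure whose final member
   is an embedded vector,

     struct parent { unsigned flags; VEC(T) v; };

   the caller might allocate and initialize it along these lines
   (struct parent, p and n are illustrative names; xmalloc is shown as
   the allocator, but any allocator works),

     struct parent *p
       = (struct parent *) xmalloc (offsetof (struct parent, v)
                                    + VEC_embedded_size (T, n));
     VEC_embedded_init (T, &p->v, n);  */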
215
216 /* Copy a vector.
217    VEC(T,A) *VEC_T_copy(VEC(T) *);
218
219    Copy the live elements of a vector into a new vector.  The new and
220    old vectors need not be allocated by the same mechanism.  */
221
222 #define VEC_copy(T,V) (VEC_OP(T,copy)(V))
223
224 /* Merge two vectors.
225    VEC(T,A) *VEC_T_merge(VEC(T) *, VEC(T) *);
226
227    Copy the live elements of both vectors into a new vector.  The new
228    and old vectors need not be allocated by the same mechanism.  */
229 #define VEC_merge(T,V1,V2) (VEC_OP(T,merge)(V1, V2))
230
231 /* Determine if a vector has additional capacity.
232
233    int VEC_T_space (VEC(T) *v,int reserve)
234
235    If V has space for RESERVE additional entries, return nonzero.  You
236    usually only need to use this if you are doing your own vector
237    reallocation, for instance on an embedded vector.  This returns
238    nonzero in exactly the same circumstances that VEC_T_reserve
239    will.  */
240
241 #define VEC_space(T,V,R) (VEC_OP(T,space)(V,R VEC_ASSERT_INFO))
242
243 /* Reserve space.
244    int VEC_T_reserve(VEC(T,A) *&v, int reserve);
245
246    Ensure that V has at least abs(RESERVE) slots available.  The
247    signedness of RESERVE determines the reallocation behavior.  A
248    negative value will not create additional headroom beyond that
249    requested.  A positive value will create additional headroom.  Note
250    this can cause V to be reallocated.  Returns nonzero iff
251    reallocation actually occurred.  */
252
253 #define VEC_reserve(T,V,R) (VEC_OP(T,reserve)(&(V),R VEC_ASSERT_INFO))
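
/* For instance, VEC_reserve (T, v, n) may allocate extra headroom
   beyond the N slots requested, whereas

     VEC_reserve (T, v, -n);

   requests room for exactly N more elements, which is useful when this
   is known to be the final allocation (V and N are illustrative).  */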
254
255 /* Push object with no reallocation
256    T *VEC_T_quick_push (VEC(T) *v, T obj); // Integer
257    T *VEC_T_quick_push (VEC(T) *v, T obj); // Pointer
258    T *VEC_T_quick_push (VEC(T) *v, T *obj); // Object
259
260    Push a new element onto the end and return a pointer to the slot
261    filled in.  For object vectors, the new value can be NULL, in which
262    case NO initialization is performed.  There must
263    be sufficient space in the vector.  */
264
265 #define VEC_quick_push(T,V,O) (VEC_OP(T,quick_push)(V,O VEC_ASSERT_INFO))
266
267 /* Push object with reallocation
268    T *VEC_T_safe_push (VEC(T,A) *&v, T obj); // Integer
269    T *VEC_T_safe_push (VEC(T,A) *&v, T obj); // Pointer
270    T *VEC_T_safe_push (VEC(T,A) *&v, T *obj); // Object
271
272    Push a new element onto the end and return a pointer to the slot
273    filled in.  For object vectors, the new value can be NULL, in which
274    case NO initialization is performed.  Reallocates V, if needed.  */
275
276 #define VEC_safe_push(T,V,O) (VEC_OP(T,safe_push)(&(V),O VEC_ASSERT_INFO))
277
278 /* Pop element off end
279    T VEC_T_pop (VEC(T) *v);             // Integer
280    T VEC_T_pop (VEC(T) *v);             // Pointer
281    void VEC_T_pop (VEC(T) *v);          // Object
282
283    Pop the last element off the end.  Returns the element popped, for
284    pointer vectors.  */
285
286 #define VEC_pop(T,V)    (VEC_OP(T,pop)(V VEC_ASSERT_INFO))
287
288 /* Truncate to specific length
289    void VEC_T_truncate (VEC(T) *v, unsigned len);
290
291    Set the length as specified.  The new length must be less than or
292    equal to the current length.  This is an O(1) operation.  */
293
294 #define VEC_truncate(T,V,I)             \
295         (VEC_OP(T,truncate)(V,I VEC_ASSERT_INFO))
296
297 /* Grow to a specific length.
298    void VEC_T_safe_grow (VEC(T,A) *&v, int len);
299
300    Grow the vector to a specific length.  LEN must be greater than
301    or equal to the current length.  The new elements are
302    uninitialized.  */
303
304 #define VEC_safe_grow(T,V,I)            \
305         (VEC_OP(T,safe_grow)(&(V),I VEC_ASSERT_INFO))
306
307 /* Replace element
308    T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Integer
309    T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Pointer
310    T *VEC_T_replace (VEC(T) *v, unsigned ix, T *val);  // Object
311
312    Replace the IXth element of V with a new value, VAL.  For pointer
313    vectors returns the original value.  For object vectors returns a
314    pointer to the new value.  For object vectors the new value can be
315    NULL, in which case no overwriting of the slot is actually
316    performed.  */
317
318 #define VEC_replace(T,V,I,O) (VEC_OP(T,replace)(V,I,O VEC_ASSERT_INFO))
319
320 /* Insert object with no reallocation
321    T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Integer
322    T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Pointer
323    T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T *val); // Object
324
325    Insert an element, VAL, at the IXth position of V.  Return a pointer
326    to the slot created.  For vectors of object, the new value can be
327    NULL, in which case no initialization of the inserted slot takes
328    place.  There must be sufficient space.  */
329
330 #define VEC_quick_insert(T,V,I,O) \
331         (VEC_OP(T,quick_insert)(V,I,O VEC_ASSERT_INFO))
332
333 /* Insert object with reallocation
334    T *VEC_T_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Integer
335    T *VEC_T_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Pointer
336    T *VEC_T_safe_insert (VEC(T,A) *&v, unsigned ix, T *val); // Object
337
338    Insert an element, VAL, at the IXth position of V.  Return a pointer
339    to the slot created.  For vectors of object, the new value can be
340    NULL, in which case no initialization of the inserted slot takes
341    place.  Reallocate V, if necessary.  */
342
343 #define VEC_safe_insert(T,V,I,O)        \
344         (VEC_OP(T,safe_insert)(&(V),I,O VEC_ASSERT_INFO))
345
346 /* Remove element retaining order
347    T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Integer
348    T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Pointer
349    void VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Object
350
351    Remove an element from the IXth position of V.  Ordering of
352    remaining elements is preserved.  For pointer vectors returns the
353    removed object.  This is an O(N) operation due to a memmove.  */
354
355 #define VEC_ordered_remove(T,V,I)       \
356         (VEC_OP(T,ordered_remove)(V,I VEC_ASSERT_INFO))
357
358 /* Remove element destroying order
359    T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Integer
360    T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Pointer
361    void VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Object
362
363    Remove an element from the IXth position of V.  Ordering of
364    remaining elements is destroyed.  For pointer vectors returns the
365    removed object.  This is an O(1) operation.  */
366
367 #define VEC_unordered_remove(T,V,I)     \
368         (VEC_OP(T,unordered_remove)(V,I VEC_ASSERT_INFO))
369
370 /* Remove a block of elements
371    void VEC_T_block_remove (VEC(T) *v, unsigned ix, unsigned len);
372
373    Remove LEN elements starting at the IXth.  Ordering is retained.
374    This is an O(N) operation due to memmove.  */
375
376 #define VEC_block_remove(T,V,I,L)       \
377         (VEC_OP(T,block_remove)(V,I,L VEC_ASSERT_INFO))
378
379 /* Get the address of the array of elements
380    T *VEC_T_address (VEC(T) v)
381
382    If you need to directly manipulate the array (for instance, you
383    want to feed it to qsort), use this accessor.  */
384
385 #define VEC_address(T,V)                (VEC_OP(T,address)(V))
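
/* For example, to sort the live elements in place with a qsort-style
   comparison function compare_T (an illustrative name),

     qsort (VEC_address (T, v), VEC_length (T, v),
            sizeof (T), compare_T);  */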
386
387 /* Find the first index in the vector not less than the object.
388    unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
389                                int (*lessthan) (const T, const T)); // Integer
390    unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
391                                int (*lessthan) (const T, const T)); // Pointer
392    unsigned VEC_T_lower_bound (VEC(T) *v, const T *val,
393                                int (*lessthan) (const T*, const T*)); // Object
394
395    Find the first position in which VAL could be inserted without
396    changing the ordering of V.  LESSTHAN is a function that returns
397    true if the first argument is strictly less than the second.  */
398
399 #define VEC_lower_bound(T,V,O,LT)    \
400        (VEC_OP(T,lower_bound)(V,O,LT VEC_ASSERT_INFO))
401
402 /* Reallocate an array of elements with prefix.  */
403 extern void *vec_p_reserve (void *, int);
404 extern void *vec_o_reserve (void *, int, size_t, size_t);
405 #define vec_free_(V) xfree (V)
406
407 #define VEC_ASSERT_INFO ,__FILE__,__LINE__
408 #define VEC_ASSERT_DECL ,const char *file_,unsigned line_
409 #define VEC_ASSERT_PASS ,file_,line_
410 #define vec_assert(expr, op) \
411   ((void)((expr) ? 0 : (gdb_assert_fail (op, file_, line_, \
412                                          FUNCTION_NAME), 0)))
413
414 #define VEC(T) VEC_##T
415 #define VEC_OP(T,OP) VEC_##T##_##OP
416
417 #define VEC_T(T)                                                          \
418 typedef struct VEC(T)                                                     \
419 {                                                                         \
420   unsigned num;                                                           \
421   unsigned alloc;                                                         \
422   T vec[1];                                                               \
423 } VEC(T)
424
425 /* Vector of integer-like object.  */
426 #define DEF_VEC_I(T)                                                      \
427 DIAGNOSTIC_PUSH                                                           \
428 DIAGNOSTIC_IGNORE_UNUSED_VEC_FUNCTION                                     \
429 static inline void VEC_OP (T,must_be_integral_type) (void)                \
430 {                                                                         \
431   (void)~(T)0;                                                            \
432 }                                                                         \
433                                                                           \
434 VEC_T(T);                                                                 \
435 DEF_VEC_FUNC_P(T)                                                         \
436 DEF_VEC_ALLOC_FUNC_I(T)                                                   \
437 DIAGNOSTIC_POP                                                            \
438 struct vec_swallow_trailing_semi
439
440 /* Vector of pointer to object.  */
441 #define DEF_VEC_P(T)                                                      \
442 DIAGNOSTIC_PUSH                                                           \
443 DIAGNOSTIC_IGNORE_UNUSED_VEC_FUNCTION                                     \
444 static inline void VEC_OP (T,must_be_pointer_type) (void)                 \
445 {                                                                         \
446   (void)((T)1 == (void *)1);                                              \
447 }                                                                         \
448                                                                           \
449 VEC_T(T);                                                                 \
450 DEF_VEC_FUNC_P(T)                                                         \
451 DEF_VEC_ALLOC_FUNC_P(T)                                                   \
452 DIAGNOSTIC_POP                                                            \
453 struct vec_swallow_trailing_semi
454
455 /* Vector of object.  */
456 #define DEF_VEC_O(T)                                                      \
457 DIAGNOSTIC_PUSH                                                           \
458 DIAGNOSTIC_IGNORE_UNUSED_VEC_FUNCTION                                     \
459 VEC_T(T);                                                                 \
460 DEF_VEC_FUNC_O(T)                                                         \
461 DEF_VEC_ALLOC_FUNC_O(T)                                                   \
462 DIAGNOSTIC_POP                                                            \
463 struct vec_swallow_trailing_semi
464
465 /* Avoid offsetof (or its usual C implementation) as it triggers
466    -Winvalid-offsetof warnings with enum_flags types with G++ <= 4.4,
467    even though those types are memcpyable.  This requires allocating a
468    dummy local VEC in all routines that use this, but that has the
469    advantage that it only works if T is default constructible, which
470    is exactly a check we want, to keep C compatibility.  */
471 #define vec_offset(T, VPTR) ((size_t) ((char *) &(VPTR)->vec - (char *) VPTR))
472
473 #define DEF_VEC_ALLOC_FUNC_I(T)                                           \
474 static inline VEC(T) *VEC_OP (T,alloc)                                    \
475      (int alloc_)                                                         \
476 {                                                                         \
477   VEC(T) dummy;                                                           \
478                                                                           \
479   /* We must request exact size allocation, hence the negation.  */       \
480   return (VEC(T) *) vec_o_reserve (NULL, -alloc_,                         \
481                                    vec_offset (T, &dummy), sizeof (T));   \
482 }                                                                         \
483                                                                           \
484 static inline VEC(T) *VEC_OP (T,copy) (VEC(T) *vec_)                      \
485 {                                                                         \
486   size_t len_ = vec_ ? vec_->num : 0;                                     \
487   VEC (T) *new_vec_ = NULL;                                               \
488                                                                           \
489   if (len_)                                                               \
490     {                                                                     \
491       VEC(T) dummy;                                                       \
492                                                                           \
493       /* We must request exact size allocation, hence the negation.  */   \
494       new_vec_ = (VEC (T) *)                                              \
495         vec_o_reserve (NULL, -len_, vec_offset (T, &dummy), sizeof (T));        \
496                                                                           \
497       new_vec_->num = len_;                                               \
498       memcpy (new_vec_->vec, vec_->vec, sizeof (T) * len_);               \
499     }                                                                     \
500   return new_vec_;                                                        \
501 }                                                                         \
502                                                                           \
503 static inline VEC(T) *VEC_OP (T,merge) (VEC(T) *vec1_, VEC(T) *vec2_)     \
504 {                                                                         \
505   if (vec1_ && vec2_)                                                     \
506     {                                                                     \
507       VEC(T) dummy;                                                       \
508       size_t len_ = vec1_->num + vec2_->num;                              \
509       VEC (T) *new_vec_ = NULL;                                           \
510                                                                           \
511       /* We must request exact size allocation, hence the negation.  */   \
512       new_vec_ = (VEC (T) *)                                              \
513         vec_o_reserve (NULL, -len_, vec_offset (T, &dummy), sizeof (T));          \
514                                                                           \
515       new_vec_->num = len_;                                               \
516       memcpy (new_vec_->vec, vec1_->vec, sizeof (T) * vec1_->num);        \
517       memcpy (new_vec_->vec + vec1_->num, vec2_->vec,                     \
518               sizeof (T) * vec2_->num);                                   \
519                                                                           \
520       return new_vec_;                                                    \
521     }                                                                     \
522   else                                                                    \
523     return VEC_copy (T, vec1_ ? vec1_ : vec2_);                           \
524 }                                                                         \
525                                                                           \
526 static inline void VEC_OP (T,free)                                        \
527      (VEC(T) **vec_)                                                      \
528 {                                                                         \
529   if (*vec_)                                                              \
530     vec_free_ (*vec_);                                                    \
531   *vec_ = NULL;                                                           \
532 }                                                                         \
533                                                                           \
534 static inline void VEC_OP (T,cleanup)                                     \
535      (void *arg_)                                                         \
536 {                                                                         \
537   VEC(T) **vec_ = (VEC(T) **) arg_;                                       \
538   if (*vec_)                                                              \
539     vec_free_ (*vec_);                                                    \
540   *vec_ = NULL;                                                           \
541 }                                                                         \
542                                                                           \
543 static inline int VEC_OP (T,reserve)                                      \
544      (VEC(T) **vec_, int alloc_ VEC_ASSERT_DECL)                          \
545 {                                                                         \
546   VEC(T) dummy;                                                           \
547   int extend = !VEC_OP (T,space)                                          \
548         (*vec_, alloc_ < 0 ? -alloc_ : alloc_ VEC_ASSERT_PASS);           \
549                                                                           \
550   if (extend)                                                             \
551     *vec_ = (VEC(T) *) vec_o_reserve (*vec_, alloc_,                      \
552                                       vec_offset (T, &dummy), sizeof (T)); \
553                                                                           \
554   return extend;                                                          \
555 }                                                                         \
556                                                                           \
557 static inline void VEC_OP (T,safe_grow)                                   \
558      (VEC(T) **vec_, int size_ VEC_ASSERT_DECL)                           \
559 {                                                                         \
560   vec_assert (size_ >= 0 && VEC_OP(T,length) (*vec_) <= (unsigned)size_,  \
561         "safe_grow");                                                     \
562   VEC_OP (T,reserve) (vec_, (int)(*vec_ ? (*vec_)->num : 0) - size_       \
563                         VEC_ASSERT_PASS);                                 \
564   (*vec_)->num = size_;                                                   \
565 }                                                                         \
566                                                                           \
567 static inline T *VEC_OP (T,safe_push)                                     \
568      (VEC(T) **vec_, const T obj_ VEC_ASSERT_DECL)                        \
569 {                                                                         \
570   VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS);                           \
571                                                                           \
572   return VEC_OP (T,quick_push) (*vec_, obj_ VEC_ASSERT_PASS);             \
573 }                                                                         \
574                                                                           \
575 static inline T *VEC_OP (T,safe_insert)                                   \
576      (VEC(T) **vec_, unsigned ix_, const T obj_ VEC_ASSERT_DECL)          \
577 {                                                                         \
578   VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS);                           \
579                                                                           \
580   return VEC_OP (T,quick_insert) (*vec_, ix_, obj_ VEC_ASSERT_PASS);      \
581 }
582
583 #define DEF_VEC_FUNC_P(T)                                                 \
584 static inline unsigned VEC_OP (T,length) (const VEC(T) *vec_)             \
585 {                                                                         \
586   return vec_ ? vec_->num : 0;                                            \
587 }                                                                         \
588                                                                           \
589 static inline T VEC_OP (T,last)                                           \
590         (const VEC(T) *vec_ VEC_ASSERT_DECL)                              \
591 {                                                                         \
592   vec_assert (vec_ && vec_->num, "last");                                 \
593                                                                           \
594   return vec_->vec[vec_->num - 1];                                        \
595 }                                                                         \
596                                                                           \
597 static inline T VEC_OP (T,index)                                          \
598      (const VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL)                   \
599 {                                                                         \
600   vec_assert (vec_ && ix_ < vec_->num, "index");                          \
601                                                                           \
602   return vec_->vec[ix_];                                                  \
603 }                                                                         \
604                                                                           \
605 static inline int VEC_OP (T,iterate)                                      \
606      (const VEC(T) *vec_, unsigned ix_, T *ptr)                           \
607 {                                                                         \
608   if (vec_ && ix_ < vec_->num)                                            \
609     {                                                                     \
610       *ptr = vec_->vec[ix_];                                              \
611       return 1;                                                           \
612     }                                                                     \
613   else                                                                    \
614     {                                                                     \
615       *ptr = (T) 0;                                                       \
616       return 0;                                                           \
617     }                                                                     \
618 }                                                                         \
619                                                                           \
620 static inline size_t VEC_OP (T,embedded_size)                             \
621      (int alloc_)                                                         \
622 {                                                                         \
623   VEC(T) dummy;                                                           \
624                                                                           \
625   return vec_offset (T, &dummy) + alloc_ * sizeof(T);                     \
626 }                                                                         \
627                                                                           \
628 static inline void VEC_OP (T,embedded_init)                               \
629      (VEC(T) *vec_, int alloc_)                                           \
630 {                                                                         \
631   vec_->num = 0;                                                          \
632   vec_->alloc = alloc_;                                                   \
633 }                                                                         \
634                                                                           \
635 static inline int VEC_OP (T,space)                                        \
636      (VEC(T) *vec_, int alloc_ VEC_ASSERT_DECL)                           \
637 {                                                                         \
638   vec_assert (alloc_ >= 0, "space");                                      \
639   return vec_ ? vec_->alloc - vec_->num >= (unsigned)alloc_ : !alloc_;    \
640 }                                                                         \
641                                                                           \
642 static inline T *VEC_OP (T,quick_push)                                    \
643      (VEC(T) *vec_, T obj_ VEC_ASSERT_DECL)                               \
644 {                                                                         \
645   T *slot_;                                                               \
646                                                                           \
647   vec_assert (vec_->num < vec_->alloc, "quick_push");                     \
648   slot_ = &vec_->vec[vec_->num++];                                        \
649   *slot_ = obj_;                                                          \
650                                                                           \
651   return slot_;                                                           \
652 }                                                                         \
653                                                                           \
654 static inline T VEC_OP (T,pop) (VEC(T) *vec_ VEC_ASSERT_DECL)             \
655 {                                                                         \
656   T obj_;                                                                 \
657                                                                           \
658   vec_assert (vec_->num, "pop");                                          \
659   obj_ = vec_->vec[--vec_->num];                                          \
660                                                                           \
661   return obj_;                                                            \
662 }                                                                         \
663                                                                           \
664 static inline void VEC_OP (T,truncate)                                    \
665      (VEC(T) *vec_, unsigned size_ VEC_ASSERT_DECL)                       \
666 {                                                                         \
667   vec_assert (vec_ ? vec_->num >= size_ : !size_, "truncate");            \
668   if (vec_)                                                               \
669     vec_->num = size_;                                                    \
670 }                                                                         \
671                                                                           \
672 static inline T VEC_OP (T,replace)                                        \
673      (VEC(T) *vec_, unsigned ix_, T obj_ VEC_ASSERT_DECL)                 \
674 {                                                                         \
675   T old_obj_;                                                             \
676                                                                           \
677   vec_assert (ix_ < vec_->num, "replace");                                \
678   old_obj_ = vec_->vec[ix_];                                              \
679   vec_->vec[ix_] = obj_;                                                  \
680                                                                           \
681   return old_obj_;                                                        \
682 }                                                                         \
683                                                                           \
684 static inline T *VEC_OP (T,quick_insert)                                  \
685      (VEC(T) *vec_, unsigned ix_, T obj_ VEC_ASSERT_DECL)                 \
686 {                                                                         \
687   T *slot_;                                                               \
688                                                                           \
689   vec_assert (vec_->num < vec_->alloc && ix_ <= vec_->num, "quick_insert"); \
690   slot_ = &vec_->vec[ix_];                                                \
691   memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (T));           \
692   *slot_ = obj_;                                                          \
693                                                                           \
694   return slot_;                                                           \
695 }                                                                         \
696                                                                           \
697 static inline T VEC_OP (T,ordered_remove)                                 \
698      (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL)                         \
699 {                                                                         \
700   T *slot_;                                                               \
701   T obj_;                                                                 \
702                                                                           \
703   vec_assert (ix_ < vec_->num, "ordered_remove");                         \
704   slot_ = &vec_->vec[ix_];                                                \
705   obj_ = *slot_;                                                          \
706   memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (T));           \
707                                                                           \
708   return obj_;                                                            \
709 }                                                                         \
710                                                                           \
711 static inline T VEC_OP (T,unordered_remove)                               \
712      (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL)                         \
713 {                                                                         \
714   T *slot_;                                                               \
715   T obj_;                                                                 \
716                                                                           \
717   vec_assert (ix_ < vec_->num, "unordered_remove");                       \
718   slot_ = &vec_->vec[ix_];                                                \
719   obj_ = *slot_;                                                          \
720   *slot_ = vec_->vec[--vec_->num];                                        \
721                                                                           \
722   return obj_;                                                            \
723 }                                                                         \
724                                                                           \
725 static inline void VEC_OP (T,block_remove)                                \
726      (VEC(T) *vec_, unsigned ix_, unsigned len_ VEC_ASSERT_DECL)          \
727 {                                                                         \
728   T *slot_;                                                               \
729                                                                           \
730   vec_assert (ix_ + len_ <= vec_->num, "block_remove");                   \
731   slot_ = &vec_->vec[ix_];                                                \
732   vec_->num -= len_;                                                      \
733   memmove (slot_, slot_ + len_, (vec_->num - ix_) * sizeof (T));          \
734 }                                                                         \
735                                                                           \
736 static inline T *VEC_OP (T,address)                                       \
737      (VEC(T) *vec_)                                                       \
738 {                                                                         \
739   return vec_ ? vec_->vec : 0;                                            \
740 }                                                                         \
741                                                                           \
742 static inline unsigned VEC_OP (T,lower_bound)                             \
743      (VEC(T) *vec_, const T obj_,                                         \
744       int (*lessthan_)(const T, const T) VEC_ASSERT_DECL)                 \
745 {                                                                         \
746    unsigned int len_ = VEC_OP (T, length) (vec_);                         \
747    unsigned int half_, middle_;                                           \
748    unsigned int first_ = 0;                                               \
749    while (len_ > 0)                                                       \
750      {                                                                    \
751         T middle_elem_;                                                   \
752         half_ = len_ >> 1;                                                \
753         middle_ = first_;                                                 \
754         middle_ += half_;                                                 \
755         middle_elem_ = VEC_OP (T,index) (vec_, middle_ VEC_ASSERT_PASS);  \
756         if (lessthan_ (middle_elem_, obj_))                               \
757           {                                                               \
758              first_ = middle_;                                            \
759              ++first_;                                                    \
760              len_ = len_ - half_ - 1;                                     \
761           }                                                               \
762         else                                                              \
763           len_ = half_;                                                   \
764      }                                                                    \
765    return first_;                                                         \
766 }
767
768 #define DEF_VEC_ALLOC_FUNC_P(T)                                           \
769 static inline VEC(T) *VEC_OP (T,alloc)                                    \
770      (int alloc_)                                                         \
771 {                                                                         \
772   /* We must request exact size allocation, hence the negation.  */       \
773   return (VEC(T) *) vec_p_reserve (NULL, -alloc_);                        \
774 }                                                                         \
775                                                                           \
776 static inline void VEC_OP (T,free)                                        \
777      (VEC(T) **vec_)                                                      \
778 {                                                                         \
779   if (*vec_)                                                              \
780     vec_free_ (*vec_);                                                    \
781   *vec_ = NULL;                                                           \
782 }                                                                         \
783                                                                           \
784 static inline void VEC_OP (T,cleanup)                                     \
785      (void *arg_)                                                         \
786 {                                                                         \
787   VEC(T) **vec_ = (VEC(T) **) arg_;                                       \
788   if (*vec_)                                                              \
789     vec_free_ (*vec_);                                                    \
790   *vec_ = NULL;                                                           \
791 }                                                                         \
792                                                                           \
793 static inline VEC(T) *VEC_OP (T,copy) (VEC(T) *vec_)                      \
794 {                                                                         \
795   size_t len_ = vec_ ? vec_->num : 0;                                     \
796   VEC (T) *new_vec_ = NULL;                                               \
797                                                                           \
798   if (len_)                                                               \
799     {                                                                     \
800       /* We must request exact size allocation, hence the negation.  */   \
801       new_vec_ = (VEC (T) *)(vec_p_reserve (NULL, -len_));                \
802                                                                           \
803       new_vec_->num = len_;                                               \
804       memcpy (new_vec_->vec, vec_->vec, sizeof (T) * len_);               \
805     }                                                                     \
806   return new_vec_;                                                        \
807 }                                                                         \
808                                                                           \
809 static inline VEC(T) *VEC_OP (T,merge) (VEC(T) *vec1_, VEC(T) *vec2_)     \
810 {                                                                         \
811   if (vec1_ && vec2_)                                                     \
812     {                                                                     \
813       size_t len_ = vec1_->num + vec2_->num;                              \
814       VEC (T) *new_vec_ = NULL;                                           \
815                                                                           \
816       /* We must request exact size allocation, hence the negation.  */   \
817       new_vec_ = (VEC (T) *)(vec_p_reserve (NULL, -len_));                \
818                                                                           \
819       new_vec_->num = len_;                                               \
820       memcpy (new_vec_->vec, vec1_->vec, sizeof (T) * vec1_->num);        \
821       memcpy (new_vec_->vec + vec1_->num, vec2_->vec,                     \
822               sizeof (T) * vec2_->num);                                   \
823                                                                           \
824       return new_vec_;                                                    \
825     }                                                                     \
826   else                                                                    \
827     return VEC_copy (T, vec1_ ? vec1_ : vec2_);                           \
828 }                                                                         \
829                                                                           \
830 static inline int VEC_OP (T,reserve)                                      \
831      (VEC(T) **vec_, int alloc_ VEC_ASSERT_DECL)                          \
832 {                                                                         \
833   int extend = !VEC_OP (T,space)                                          \
834         (*vec_, alloc_ < 0 ? -alloc_ : alloc_ VEC_ASSERT_PASS);           \
835                                                                           \
836   if (extend)                                                             \
837     *vec_ = (VEC(T) *) vec_p_reserve (*vec_, alloc_);                     \
838                                                                           \
839   return extend;                                                          \
840 }                                                                         \
841                                                                           \
842 static inline void VEC_OP (T,safe_grow)                                   \
843      (VEC(T) **vec_, int size_ VEC_ASSERT_DECL)                           \
844 {                                                                         \
845   vec_assert (size_ >= 0 && VEC_OP(T,length) (*vec_) <= (unsigned)size_,  \
846         "safe_grow");                                                     \
847   VEC_OP (T,reserve)                                                      \
848         (vec_, (int)(*vec_ ? (*vec_)->num : 0) - size_ VEC_ASSERT_PASS);  \
849   (*vec_)->num = size_;                                                   \
850 }                                                                         \
851                                                                           \
852 static inline T *VEC_OP (T,safe_push)                                     \
853      (VEC(T) **vec_, T obj_ VEC_ASSERT_DECL)                              \
854 {                                                                         \
855   VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS);                           \
856                                                                           \
857   return VEC_OP (T,quick_push) (*vec_, obj_ VEC_ASSERT_PASS);             \
858 }                                                                         \
859                                                                           \
860 static inline T *VEC_OP (T,safe_insert)                                   \
861      (VEC(T) **vec_, unsigned ix_, T obj_ VEC_ASSERT_DECL)                \
862 {                                                                         \
863   VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS);                           \
864                                                                           \
865   return VEC_OP (T,quick_insert) (*vec_, ix_, obj_ VEC_ASSERT_PASS);      \
866 }
867
#define DEF_VEC_FUNC_O(T)                                                 \
static inline unsigned VEC_OP (T,length) (const VEC(T) *vec_)             \
{                                                                         \
  return vec_ ? vec_->num : 0;                                            \
}                                                                         \
                                                                          \
static inline T *VEC_OP (T,last) (VEC(T) *vec_ VEC_ASSERT_DECL)           \
{                                                                         \
  vec_assert (vec_ && vec_->num, "last");                                 \
                                                                          \
  return &vec_->vec[vec_->num - 1];                                       \
}                                                                         \
                                                                          \
static inline T *VEC_OP (T,index)                                         \
     (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL)                         \
{                                                                         \
  vec_assert (vec_ && ix_ < vec_->num, "index");                          \
                                                                          \
  return &vec_->vec[ix_];                                                 \
}                                                                         \
                                                                          \
static inline int VEC_OP (T,iterate)                                      \
     (VEC(T) *vec_, unsigned ix_, T **ptr)                                \
{                                                                         \
  if (vec_ && ix_ < vec_->num)                                            \
    {                                                                     \
      *ptr = &vec_->vec[ix_];                                             \
      return 1;                                                           \
    }                                                                     \
  else                                                                    \
    {                                                                     \
      *ptr = 0;                                                           \
      return 0;                                                           \
    }                                                                     \
}                                                                         \
                                                                          \
static inline size_t VEC_OP (T,embedded_size)                             \
     (int alloc_)                                                         \
{                                                                         \
  VEC(T) dummy;                                                           \
                                                                          \
  return vec_offset (T, &dummy) + alloc_ * sizeof(T);                     \
}                                                                         \
                                                                          \
static inline void VEC_OP (T,embedded_init)                               \
     (VEC(T) *vec_, int alloc_)                                           \
{                                                                         \
  vec_->num = 0;                                                          \
  vec_->alloc = alloc_;                                                   \
}                                                                         \
                                                                          \
static inline int VEC_OP (T,space)                                        \
     (VEC(T) *vec_, int alloc_ VEC_ASSERT_DECL)                           \
{                                                                         \
  vec_assert (alloc_ >= 0, "space");                                      \
  return vec_ ? vec_->alloc - vec_->num >= (unsigned)alloc_ : !alloc_;    \
}                                                                         \
                                                                          \
static inline T *VEC_OP (T,quick_push)                                    \
     (VEC(T) *vec_, const T *obj_ VEC_ASSERT_DECL)                        \
{                                                                         \
  T *slot_;                                                               \
                                                                          \
  vec_assert (vec_->num < vec_->alloc, "quick_push");                     \
  slot_ = &vec_->vec[vec_->num++];                                        \
  if (obj_)                                                               \
    *slot_ = *obj_;                                                       \
                                                                          \
  return slot_;                                                           \
}                                                                         \
                                                                          \
static inline void VEC_OP (T,pop) (VEC(T) *vec_ VEC_ASSERT_DECL)          \
{                                                                         \
  vec_assert (vec_->num, "pop");                                          \
  --vec_->num;                                                            \
}                                                                         \
                                                                          \
static inline void VEC_OP (T,truncate)                                    \
     (VEC(T) *vec_, unsigned size_ VEC_ASSERT_DECL)                       \
{                                                                         \
  vec_assert (vec_ ? vec_->num >= size_ : !size_, "truncate");            \
  if (vec_)                                                               \
    vec_->num = size_;                                                    \
}                                                                         \
                                                                          \
static inline T *VEC_OP (T,replace)                                       \
     (VEC(T) *vec_, unsigned ix_, const T *obj_ VEC_ASSERT_DECL)          \
{                                                                         \
  T *slot_;                                                               \
                                                                          \
  vec_assert (ix_ < vec_->num, "replace");                                \
  slot_ = &vec_->vec[ix_];                                                \
  if (obj_)                                                               \
    *slot_ = *obj_;                                                       \
                                                                          \
  return slot_;                                                           \
}                                                                         \
                                                                          \
static inline T *VEC_OP (T,quick_insert)                                  \
     (VEC(T) *vec_, unsigned ix_, const T *obj_ VEC_ASSERT_DECL)          \
{                                                                         \
  T *slot_;                                                               \
                                                                          \
  vec_assert (vec_->num < vec_->alloc && ix_ <= vec_->num, "quick_insert"); \
  slot_ = &vec_->vec[ix_];                                                \
  memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (T));           \
  if (obj_)                                                               \
    *slot_ = *obj_;                                                       \
                                                                          \
  return slot_;                                                           \
}                                                                         \
                                                                          \
static inline void VEC_OP (T,ordered_remove)                              \
     (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL)                         \
{                                                                         \
  T *slot_;                                                               \
                                                                          \
  vec_assert (ix_ < vec_->num, "ordered_remove");                         \
  slot_ = &vec_->vec[ix_];                                                \
  memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (T));           \
}                                                                         \
                                                                          \
static inline void VEC_OP (T,unordered_remove)                            \
     (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL)                         \
{                                                                         \
  vec_assert (ix_ < vec_->num, "unordered_remove");                       \
  vec_->vec[ix_] = vec_->vec[--vec_->num];                                \
}                                                                         \
                                                                          \
static inline void VEC_OP (T,block_remove)                                \
     (VEC(T) *vec_, unsigned ix_, unsigned len_ VEC_ASSERT_DECL)          \
{                                                                         \
  T *slot_;                                                               \
                                                                          \
  vec_assert (ix_ + len_ <= vec_->num, "block_remove");                   \
  slot_ = &vec_->vec[ix_];                                                \
  vec_->num -= len_;                                                      \
  memmove (slot_, slot_ + len_, (vec_->num - ix_) * sizeof (T));          \
}                                                                         \
                                                                          \
static inline T *VEC_OP (T,address)                                       \
     (VEC(T) *vec_)                                                       \
{                                                                         \
  return vec_ ? vec_->vec : 0;                                            \
}                                                                         \
                                                                          \
static inline unsigned VEC_OP (T,lower_bound)                             \
     (VEC(T) *vec_, const T *obj_,                                        \
      int (*lessthan_)(const T *, const T *) VEC_ASSERT_DECL)             \
{                                                                         \
   unsigned int len_ = VEC_OP (T, length) (vec_);                         \
   unsigned int half_, middle_;                                           \
   unsigned int first_ = 0;                                               \
   while (len_ > 0)                                                       \
     {                                                                    \
        T *middle_elem_;                                                  \
        half_ = len_ >> 1;                                                \
        middle_ = first_;                                                 \
        middle_ += half_;                                                 \
        middle_elem_ = VEC_OP (T,index) (vec_, middle_ VEC_ASSERT_PASS);  \
        if (lessthan_ (middle_elem_, obj_))                               \
          {                                                               \
             first_ = middle_;                                            \
             ++first_;                                                    \
             len_ = len_ - half_ - 1;                                     \
          }                                                               \
        else                                                              \
          len_ = half_;                                                   \
     }                                                                    \
   return first_;                                                         \
}

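/* A minimal usage sketch for the accessors generated above, assuming
   the VEC_length, VEC_index and VEC_iterate wrapper macros declared
   earlier in this header.  The "point_t" type and the "sum_x"
   function are hypothetical client code, shown only to make the
   calling convention of the object ('O') variant concrete: elements
   are stored in the vector itself and handed back by pointer.

     typedef struct point { int x, y; } point_t;
     DEF_VEC_O (point_t);

     static int
     sum_x (VEC(point_t) *points)
     {
       int ix, sum = 0;
       point_t *p;

       for (ix = 0; VEC_iterate (point_t, points, ix, p); ix++)
         sum += p->x;
       return sum;
     }  */
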
#define DEF_VEC_ALLOC_FUNC_O(T)                                           \
static inline VEC(T) *VEC_OP (T,alloc)                                    \
     (int alloc_)                                                         \
{                                                                         \
  VEC(T) dummy;                                                           \
                                                                          \
  /* We must request exact size allocation, hence the negation.  */       \
  return (VEC(T) *) vec_o_reserve (NULL, -alloc_,                         \
                                   vec_offset (T, &dummy), sizeof (T));   \
}                                                                         \
                                                                          \
static inline VEC(T) *VEC_OP (T,copy) (VEC(T) *vec_)                      \
{                                                                         \
  size_t len_ = vec_ ? vec_->num : 0;                                     \
  VEC (T) *new_vec_ = NULL;                                               \
                                                                          \
  if (len_)                                                               \
    {                                                                     \
      VEC(T) dummy;                                                       \
                                                                          \
      /* We must request exact size allocation, hence the negation.  */   \
      new_vec_ = (VEC (T) *)                                              \
        vec_o_reserve  (NULL, -len_, vec_offset (T, &dummy), sizeof (T)); \
                                                                          \
      new_vec_->num = len_;                                               \
      memcpy (new_vec_->vec, vec_->vec, sizeof (T) * len_);               \
    }                                                                     \
  return new_vec_;                                                        \
}                                                                         \
                                                                          \
static inline VEC(T) *VEC_OP (T,merge) (VEC(T) *vec1_, VEC(T) *vec2_)     \
{                                                                         \
  if (vec1_ && vec2_)                                                     \
    {                                                                     \
      VEC(T) dummy;                                                       \
      size_t len_ = vec1_->num + vec2_->num;                              \
      VEC (T) *new_vec_ = NULL;                                           \
                                                                          \
      /* We must request exact size allocation, hence the negation.  */   \
      new_vec_ = (VEC (T) *)                                              \
        vec_o_reserve (NULL, -len_, vec_offset (T, &dummy), sizeof (T));  \
                                                                          \
      new_vec_->num = len_;                                               \
      memcpy (new_vec_->vec, vec1_->vec, sizeof (T) * vec1_->num);        \
      memcpy (new_vec_->vec + vec1_->num, vec2_->vec,                     \
              sizeof (T) * vec2_->num);                                   \
                                                                          \
      return new_vec_;                                                    \
    }                                                                     \
  else                                                                    \
    return VEC_copy (T, vec1_ ? vec1_ : vec2_);                           \
}                                                                         \
                                                                          \
static inline void VEC_OP (T,free)                                        \
     (VEC(T) **vec_)                                                      \
{                                                                         \
  if (*vec_)                                                              \
    vec_free_ (*vec_);                                                    \
  *vec_ = NULL;                                                           \
}                                                                         \
                                                                          \
static inline void VEC_OP (T,cleanup)                                     \
     (void *arg_)                                                         \
{                                                                         \
  VEC(T) **vec_ = (VEC(T) **) arg_;                                       \
  if (*vec_)                                                              \
    vec_free_ (*vec_);                                                    \
  *vec_ = NULL;                                                           \
}                                                                         \
                                                                          \
static inline int VEC_OP (T,reserve)                                      \
     (VEC(T) **vec_, int alloc_ VEC_ASSERT_DECL)                          \
{                                                                         \
  VEC(T) dummy;                                                           \
  int extend = !VEC_OP (T,space) (*vec_, alloc_ < 0 ? -alloc_ : alloc_    \
                                  VEC_ASSERT_PASS);                       \
                                                                          \
  if (extend)                                                             \
    *vec_ = (VEC(T) *)                                                    \
      vec_o_reserve (*vec_, alloc_, vec_offset (T, &dummy), sizeof (T));  \
                                                                          \
  return extend;                                                          \
}                                                                         \
                                                                          \
static inline void VEC_OP (T,safe_grow)                                   \
     (VEC(T) **vec_, int size_ VEC_ASSERT_DECL)                           \
{                                                                         \
  vec_assert (size_ >= 0 && VEC_OP(T,length) (*vec_) <= (unsigned)size_,  \
        "safe_grow");                                                     \
  VEC_OP (T,reserve)                                                      \
        (vec_, (int)(*vec_ ? (*vec_)->num : 0) - size_ VEC_ASSERT_PASS);  \
  (*vec_)->num = size_;                                                   \
}                                                                         \
                                                                          \
static inline T *VEC_OP (T,safe_push)                                     \
     (VEC(T) **vec_, const T *obj_ VEC_ASSERT_DECL)                       \
{                                                                         \
  VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS);                           \
                                                                          \
  return VEC_OP (T,quick_push) (*vec_, obj_ VEC_ASSERT_PASS);             \
}                                                                         \
                                                                          \
static inline T *VEC_OP (T,safe_insert)                                   \
     (VEC(T) **vec_, unsigned ix_, const T *obj_ VEC_ASSERT_DECL)         \
{                                                                         \
  VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS);                           \
                                                                          \
  return VEC_OP (T,quick_insert) (*vec_, ix_, obj_ VEC_ASSERT_PASS);      \
}

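/* A minimal lifecycle sketch for the allocating operations generated
   above, assuming the VEC_safe_push and VEC_free wrapper macros
   declared earlier in this header ("point_t" is the same hypothetical
   client type as in the sketch above).  A negative element count
   passed to vec_o_reserve requests exact-size allocation, which is
   why the allocators above negate their sizes.

     VEC(point_t) *points = NULL;
     point_t pt = { 1, 2 };

     VEC_safe_push (point_t, points, &pt);
     VEC_free (point_t, points);  */
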
#endif /* COMMON_VEC_H */