// This file is part of the ustl library, an STL implementation.
//
// Copyright (C) 2005 by Mike Sharov <msharov@users.sourceforge.net>
// This file is free software, distributed under the MIT License.
//
// ualgobase.h
//
// Implementation of STL algorithms.
//
// The function prototypes are copied exactly from the SGI version of the
// STL documentation, along with comments about their use. The code is NOT
// the same, though the functionality usually is.

#ifndef UALGOBASE_H_683A0BE77546133C4CE0E3622CFAA2EB
#define UALGOBASE_H_683A0BE77546133C4CE0E3622CFAA2EB

#include "uutility.h"
#include <string.h>

namespace ustl {

/// Assigns the contents of a to b and the contents of b to a.
/// This is used as a primitive operation by many other algorithms.
/// \ingroup SwapAlgorithms
///
template <typename Assignable>
inline void swap (Assignable& a, Assignable& b)
{
    Assignable tmp = a;
    a = b;
    b = tmp;
}

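// Example: exchanging two values.
//	int a = 1, b = 2;
//	swap (a, b);	// now a == 2, b == 1
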
/// Equivalent to swap (*a, *b)
/// \ingroup SwapAlgorithms
///
template <typename Iterator>
inline void iter_swap (Iterator a, Iterator b)
{
    swap (*a, *b);
}

/// Copy copies elements from the range [first, last) to the range
/// [result, result + (last - first)). That is, it performs the assignments
/// *result = *first, *(result + 1) = *(first + 1), and so on. Generally,
/// for every integer n from 0 to last - first, copy performs the assignment
/// *(result + n) = *(first + n). Assignments are performed in forward order,
/// i.e. in order of increasing n.
/// \ingroup MutatingAlgorithms
///
template <typename InputIterator, typename OutputIterator>
inline OutputIterator copy (InputIterator first, InputIterator last, OutputIterator result)
{
    for (; first != last; ++result, ++first)
        *result = *first;
    return (result);
}

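// Example: copying one array into another.
//	int src[3] = { 1, 2, 3 }, dst[3];
//	copy (src, src + 3, dst);	// dst is now { 1, 2, 3 }
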
/// Copy_n copies elements from the range [first, first + n) to the range
/// [result, result + n). That is, it performs the assignments
/// *result = *first, *(result + 1) = *(first + 1), and so on. Generally,
/// for every integer i from 0 up to (but not including) n, copy_n performs
/// the assignment *(result + i) = *(first + i). Assignments are performed
/// in forward order, i.e. in order of increasing i.
/// \ingroup MutatingAlgorithms
///
template <typename InputIterator, typename OutputIterator>
inline OutputIterator copy_n (InputIterator first, size_t count, OutputIterator result)
{
    for (; count; --count, ++result, ++first)
        *result = *first;
    return (result);
}

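// Example: copy_n takes an element count instead of an end iterator.
//	int src[3] = { 1, 2, 3 }, dst[3];
//	copy_n (src, 3, dst);	// equivalent to copy (src, src + 3, dst)
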
/// \brief Copy_backward copies elements from the range [first, last) to the
/// range [result - (last - first), result).
/// \ingroup MutatingAlgorithms
/// Copies elements starting at last, decrementing both last and result.
///
template <typename InputIterator, typename OutputIterator>
inline OutputIterator copy_backward (InputIterator first, InputIterator last, OutputIterator result)
{
    while (first != last)
        *--result = *--last;
    return (result);
}

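// Example: shifting a range right within itself. Because the assignments
// run backward, an overlap where result is past last is handled correctly.
//	int v[4] = { 1, 2, 3, 0 };
//	copy_backward (v, v + 3, v + 4);	// v is now { 1, 1, 2, 3 }
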
/// For_each applies the function object f to each element in the range
/// [first, last); f's return value, if any, is ignored. Applications are
/// performed in forward order, i.e. from first to last. For_each returns
/// the function object after it has been applied to each element.
/// \ingroup MutatingAlgorithms
///
template <typename InputIterator, typename UnaryFunction>
inline UnaryFunction for_each (InputIterator first, InputIterator last, UnaryFunction f)
{
    for (; first != last; ++first)
        f (*first);
    return (f);
}

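// Example: accumulating with a stateful function object, reading the result
// back from the returned copy of the functor.
//	struct Adder { int sum; Adder (void) : sum (0) {} void operator() (int v) { sum += v; } };
//	int v[3] = { 1, 2, 3 };
//	int total = for_each (v, v + 3, Adder()).sum;	// total == 6
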
/// Fill assigns the value value to every element in the range [first, last).
/// That is, for every iterator i in [first, last),
/// it performs the assignment *i = value.
/// \ingroup GeneratorAlgorithms
///
template <typename ForwardIterator, typename T>
inline void fill (ForwardIterator first, ForwardIterator last, const T& value)
{
    for (; first != last; ++first)
        *first = value;
}

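// Example: zeroing a buffer.
//	int v[4];
//	fill (v, v + 4, 0);	// v is now { 0, 0, 0, 0 }
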
/// Fill_n assigns the value value to every element in the range
/// [first, first + count). That is, for every iterator i in [first, first + count),
/// it performs the assignment *i = value. The return value is first + count.
/// \ingroup GeneratorAlgorithms
///
template <typename OutputIterator, typename T>
inline OutputIterator fill_n (OutputIterator first, size_t count, const T& value)
{
    for (; count; --count, ++first)
        *first = value;
    return (first);
}

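// Example: fill_n returns the end of the filled range.
//	char buf[8];
//	char* end = fill_n (buf, 4, 'x');	// buf starts with "xxxx", end == buf + 4
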
#if CPU_HAS_MMX
extern "C" void copy_n_fast (const void* src, size_t count, void* dest) throw();
#else
inline void copy_n_fast (const void* src, size_t count, void* dest) throw()
    { memcpy (dest, src, count); }
#endif

#if __i386__ || __x86_64__
extern "C" void copy_backward_fast (const void* first, const void* last, void* result) throw();
#else
inline void copy_backward_fast (const void* first, const void* last, void* result) throw()
{
    const size_t nBytes (distance (first, last));
    memmove (advance (result, -nBytes), first, nBytes);
}
#endif

extern "C" void fill_n8_fast (uint8_t* dest, size_t count, uint8_t v) throw();
extern "C" void fill_n16_fast (uint16_t* dest, size_t count, uint16_t v) throw();
extern "C" void fill_n32_fast (uint32_t* dest, size_t count, uint32_t v) throw();
extern "C" void rotate_fast (void* first, void* middle, void* last) throw();

#if __GNUC__ >= 4
/// \brief Computes the number of 1 bits in a number.
/// \ingroup ConditionAlgorithms
inline size_t popcount (uint32_t v)	{ return (__builtin_popcount (v)); }
#if HAVE_INT64_T
inline size_t popcount (uint64_t v)	{ return (__builtin_popcountll (v)); }
#endif
#else
size_t popcount (uint32_t v);
#if HAVE_INT64_T
size_t popcount (uint64_t v);
#endif // HAVE_INT64_T
#endif // __GNUC__

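// Example:
//	popcount (uint32_t(0xF0F0F0F0));	// returns 16
//	popcount (uint32_t(7));			// returns 3
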
//----------------------------------------------------------------------
// Optimized versions for standard types
//----------------------------------------------------------------------

#if WANT_UNROLLED_COPY

template <typename T>
inline T* unrolled_copy (const T* first, size_t count, T* result)
{
    copy_n_fast (first, count * sizeof(T), result);
    return (advance (result, count));
}

template <>
inline uint8_t* copy_backward (const uint8_t* first, const uint8_t* last, uint8_t* result)
{
    copy_backward_fast (first, last, result);
    return (result - distance (first, last));	// return the beginning of the written range, like the generic version
}

template <typename T>
inline T* unrolled_fill (T* result, size_t count, T value)
{
    for (; count; --count, ++result)
        *result = value;
    return (result);
}

template <> inline uint8_t* unrolled_fill (uint8_t* result, size_t count, uint8_t value)
    { fill_n8_fast (result, count, value); return (advance (result, count)); }
template <> inline uint16_t* unrolled_fill (uint16_t* result, size_t count, uint16_t value)
    { fill_n16_fast (result, count, value); return (advance (result, count)); }
template <> inline uint32_t* unrolled_fill (uint32_t* result, size_t count, uint32_t value)
    { fill_n32_fast (result, count, value); return (advance (result, count)); }
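// The float specialization below reuses fill_n32_fast by reinterpreting the
// float's bit pattern as a uint32_t (noalias_cast avoids strict-aliasing
// warnings); this assumes sizeof(float) == sizeof(uint32_t).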
template <> inline float* unrolled_fill (float* result, size_t count, float value)
    { fill_n32_fast ((uint32_t*) result, count, *noalias_cast<uint32_t*>(&value)); return (advance (result, count)); }

#if CPU_HAS_MMX
#define UNROLLED_COPY_SPECIALIZATION(type)	\
template <> inline type* copy (const type* first, const type* last, type* result)	\
{ return (unrolled_copy (first, distance (first, last), result)); }	\
template <> inline type* copy_n (const type* first, size_t count, type* result)	\
{ return (unrolled_copy (first, count, result)); }
#define UNROLLED_FILL_SPECIALIZATION(type)	\
template <> inline void fill (type* first, type* last, const type& value)	\
{ unrolled_fill (first, distance (first, last), value); }	\
template <> inline type* fill_n (type* first, size_t count, const type& value)	\
{ return (unrolled_fill (first, count, value)); }
UNROLLED_COPY_SPECIALIZATION(uint8_t)
UNROLLED_FILL_SPECIALIZATION(uint8_t)
UNROLLED_COPY_SPECIALIZATION(uint16_t)
UNROLLED_FILL_SPECIALIZATION(uint16_t)
UNROLLED_COPY_SPECIALIZATION(uint32_t)
UNROLLED_FILL_SPECIALIZATION(uint32_t)
UNROLLED_COPY_SPECIALIZATION(float)
UNROLLED_FILL_SPECIALIZATION(float)
#undef UNROLLED_FILL_SPECIALIZATION
#undef UNROLLED_COPY_SPECIALIZATION
#endif // CPU_HAS_MMX
#endif // WANT_UNROLLED_COPY

// Specializations for void* and char*, aliasing the above optimized versions.
//
// All these need duplication with const and non-const arguments, since
// otherwise the compiler will default to the unoptimized version for
// pointers not const in the caller's context, such as local variables.
// These are all inline, but they sure slow down compilation... :(

#define COPY_ALIAS_FUNC(ctype, type, alias_type)	\
template <> inline type* copy (ctype* first, ctype* last, type* result)	\
{ return ((type*) copy ((const alias_type*) first, (const alias_type*) last, (alias_type*) result)); }

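// For illustration, COPY_ALIAS_FUNC(const char, char, uint8_t) expands to:
//	template <> inline char* copy (const char* first, const char* last, char* result)
//	{ return ((char*) copy ((const uint8_t*) first, (const uint8_t*) last, (uint8_t*) result)); }
// The COPY_BACKWARD, FILL, COPY_N, and FILL_N alias macros below follow the
// same pattern.
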
#if WANT_UNROLLED_COPY
#if HAVE_THREE_CHAR_TYPES
COPY_ALIAS_FUNC(const char, char, uint8_t)
COPY_ALIAS_FUNC(char, char, uint8_t)
#endif
COPY_ALIAS_FUNC(const int8_t, int8_t, uint8_t)
COPY_ALIAS_FUNC(int8_t, int8_t, uint8_t)
COPY_ALIAS_FUNC(uint8_t, uint8_t, uint8_t)
COPY_ALIAS_FUNC(const int16_t, int16_t, uint16_t)
COPY_ALIAS_FUNC(int16_t, int16_t, uint16_t)
COPY_ALIAS_FUNC(uint16_t, uint16_t, uint16_t)
#if CPU_HAS_MMX || (SIZE_OF_LONG > 4)
COPY_ALIAS_FUNC(const int32_t, int32_t, uint32_t)
COPY_ALIAS_FUNC(int32_t, int32_t, uint32_t)
COPY_ALIAS_FUNC(uint32_t, uint32_t, uint32_t)
#endif
#endif
COPY_ALIAS_FUNC(const void, void, uint8_t)
COPY_ALIAS_FUNC(void, void, uint8_t)
#undef COPY_ALIAS_FUNC

#define COPY_BACKWARD_ALIAS_FUNC(ctype, type, alias_type)	\
template <> inline type* copy_backward (ctype* first, ctype* last, type* result)	\
{ return ((type*) copy_backward ((const alias_type*) first, (const alias_type*) last, (alias_type*) result)); }
#if WANT_UNROLLED_COPY
#if HAVE_THREE_CHAR_TYPES
COPY_BACKWARD_ALIAS_FUNC(char, char, uint8_t)
#endif
COPY_BACKWARD_ALIAS_FUNC(uint8_t, uint8_t, uint8_t)
COPY_BACKWARD_ALIAS_FUNC(int8_t, int8_t, uint8_t)
COPY_BACKWARD_ALIAS_FUNC(uint16_t, uint16_t, uint8_t)
COPY_BACKWARD_ALIAS_FUNC(const uint16_t, uint16_t, uint8_t)
COPY_BACKWARD_ALIAS_FUNC(int16_t, int16_t, uint8_t)
COPY_BACKWARD_ALIAS_FUNC(const int16_t, int16_t, uint8_t)
#endif
COPY_BACKWARD_ALIAS_FUNC(void, void, uint8_t)
COPY_BACKWARD_ALIAS_FUNC(const void, void, uint8_t)
#undef COPY_BACKWARD_ALIAS_FUNC

#define FILL_ALIAS_FUNC(type, alias_type, v_type)	\
template <> inline void fill (type* first, type* last, const v_type& value)	\
{ fill ((alias_type*) first, (alias_type*) last, (const alias_type&) value); }
FILL_ALIAS_FUNC(void, uint8_t, char)
FILL_ALIAS_FUNC(void, uint8_t, uint8_t)
#if WANT_UNROLLED_COPY
#if HAVE_THREE_CHAR_TYPES
FILL_ALIAS_FUNC(char, uint8_t, char)
FILL_ALIAS_FUNC(char, uint8_t, uint8_t)
#endif
FILL_ALIAS_FUNC(int8_t, uint8_t, int8_t)
FILL_ALIAS_FUNC(int16_t, uint16_t, int16_t)
#if CPU_HAS_MMX || (SIZE_OF_LONG > 4)
FILL_ALIAS_FUNC(int32_t, uint32_t, int32_t)
#endif
#endif
#undef FILL_ALIAS_FUNC

#define COPY_N_ALIAS_FUNC(ctype, type, alias_type)	\
template <> inline type* copy_n (ctype* first, size_t count, type* result)	\
{ return ((type*) copy_n ((const alias_type*) first, count, (alias_type*) result)); }
COPY_N_ALIAS_FUNC(const void, void, uint8_t)
COPY_N_ALIAS_FUNC(void, void, uint8_t)
#if WANT_UNROLLED_COPY
#if HAVE_THREE_CHAR_TYPES
COPY_N_ALIAS_FUNC(const char, char, uint8_t)
COPY_N_ALIAS_FUNC(char, char, uint8_t)
#endif
COPY_N_ALIAS_FUNC(int8_t, int8_t, uint8_t)
COPY_N_ALIAS_FUNC(uint8_t, uint8_t, uint8_t)
COPY_N_ALIAS_FUNC(const int8_t, int8_t, uint8_t)
COPY_N_ALIAS_FUNC(int16_t, int16_t, uint16_t)
COPY_N_ALIAS_FUNC(uint16_t, uint16_t, uint16_t)
COPY_N_ALIAS_FUNC(const int16_t, int16_t, uint16_t)
#if CPU_HAS_MMX || (SIZE_OF_LONG > 4)
COPY_N_ALIAS_FUNC(int32_t, int32_t, uint32_t)
COPY_N_ALIAS_FUNC(uint32_t, uint32_t, uint32_t)
COPY_N_ALIAS_FUNC(const int32_t, int32_t, uint32_t)
#endif
#endif
#undef COPY_N_ALIAS_FUNC

#define FILL_N_ALIAS_FUNC(type, alias_type, v_type)	\
template <> inline type* fill_n (type* first, size_t n, const v_type& value)	\
{ return ((type*) fill_n ((alias_type*) first, n, (const alias_type&) value)); }
FILL_N_ALIAS_FUNC(void, uint8_t, char)
FILL_N_ALIAS_FUNC(void, uint8_t, uint8_t)
#if WANT_UNROLLED_COPY
#if HAVE_THREE_CHAR_TYPES
FILL_N_ALIAS_FUNC(char, uint8_t, char)
FILL_N_ALIAS_FUNC(char, uint8_t, uint8_t)
#endif
FILL_N_ALIAS_FUNC(int8_t, uint8_t, int8_t)
FILL_N_ALIAS_FUNC(int16_t, uint16_t, int16_t)
#if CPU_HAS_MMX || (SIZE_OF_LONG > 4)
FILL_N_ALIAS_FUNC(int32_t, uint32_t, int32_t)
#endif
#endif
#undef FILL_N_ALIAS_FUNC

extern const char _FmtPrtChr[2][8];

} // namespace ustl

#endif