/* Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef APR_ATOMIC_H
#define APR_ATOMIC_H

/**
 * @file apr_atomic.h
 * @brief APR Atomic Operations
 */

#include "apr.h"
#include "apr_pools.h"
/* Platform includes for atomics */
#if defined(NETWARE) || defined(__MVS__) /* OS/390 */
#elif defined(__FreeBSD__)
#include <machine/atomic.h>
#endif

#ifdef __cplusplus
extern "C" {
#endif

/**
 * @defgroup apr_atomic Atomic Operations
 * @ingroup APR
 * @{
 */
#if defined(DOXYGEN)
/* easiest way to get these documented for the moment */

/**
 * Structure for holding an atomic value.
 * @warning On some platforms this value only has a 24-bit range.
 */
typedef apr_atomic_t;
/**
 * Some platforms require this function to be called to initialize
 * the internal structures used by the atomic operations.
 * @param p pool to use for any internal allocation
 * @return APR_SUCCESS on successful completion
 */
apr_status_t apr_atomic_init(apr_pool_t *p);
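/* Usage sketch, for illustration only: the usual startup order is to
 * initialize APR, create a pool, and then call apr_atomic_init() once before
 * any other apr_atomic_*() operation.  The helper name setup_atomics() is
 * hypothetical.
 *
 *   #include "apr_general.h"
 *   #include "apr_atomic.h"
 *
 *   static apr_status_t setup_atomics(apr_pool_t **pool)
 *   {
 *       apr_status_t rv = apr_initialize();
 *       if (rv != APR_SUCCESS)
 *           return rv;
 *       rv = apr_pool_create(pool, NULL);
 *       if (rv != APR_SUCCESS)
 *           return rv;
 *       return apr_atomic_init(*pool);
 *   }
 */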
/**
 * Read the value stored in an atomic variable.
 * @param mem pointer to the atomic value
 * @warning on certain platforms the value is not stored directly in the
 *          pointed-to memory; on others it is
 */
apr_uint32_t apr_atomic_read(volatile apr_atomic_t *mem);
/**
 * Set the value of the atomic variable.
 * @param mem pointer to the atomic value
 * @param val the value to store
 */
void apr_atomic_set(volatile apr_atomic_t *mem, apr_uint32_t val);
/**
 * Add 'val' to the atomic variable.
 * @param mem pointer to the atomic value
 * @param val the value to add
 */
void apr_atomic_add(volatile apr_atomic_t *mem, apr_uint32_t val);
/**
 * Increment the atomic variable by 1.
 * @param mem pointer to the atomic value
 */
void apr_atomic_inc(volatile apr_atomic_t *mem);
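/* Usage sketch, for illustration only: a shared counter that is seeded with
 * apr_atomic_set(), bumped from any thread with apr_atomic_inc(), and sampled
 * with apr_atomic_read().  The 'requests' variable and the helper names are
 * hypothetical.
 *
 *   static apr_atomic_t requests;
 *
 *   static void requests_reset(void)
 *   {
 *       apr_atomic_set(&requests, 0);
 *   }
 *
 *   static void requests_bump(void)
 *   {
 *       apr_atomic_inc(&requests);
 *   }
 *
 *   static apr_uint32_t requests_sample(void)
 *   {
 *       return apr_atomic_read(&requests);
 *   }
 */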
/**
 * Decrement the atomic variable by 1.
 * @param mem pointer to the atomic value
 * @return zero if the value becomes zero after the decrement,
 *         otherwise non-zero
 */
int apr_atomic_dec(volatile apr_atomic_t *mem);
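/* Usage sketch, for illustration only: the return value makes
 * apr_atomic_dec() convenient for reference counting, since the cleanup
 * branch runs only when the last reference is dropped.  The refcounted_t
 * type and destroy_object() are hypothetical.
 *
 *   typedef struct refcounted_t {
 *       apr_atomic_t refcount;
 *   } refcounted_t;
 *
 *   static void release(refcounted_t *obj)
 *   {
 *       if (apr_atomic_dec(&obj->refcount) == 0) {
 *           destroy_object(obj);
 *       }
 *   }
 */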
/**
 * Compare the atomic's value with 'cmp'.
 * If they are the same, swap the value with 'with'.
 * @param mem pointer to the atomic value
 * @param with what to swap it with
 * @param cmp the value to compare it to
 * @return the old value of the atomic
 * @warning do not mix the other apr_atomic_*() operations with this CAS
 *          function on the same value; on some platforms they may be
 *          implemented by different mechanisms
 */
apr_uint32_t apr_atomic_cas(volatile apr_uint32_t *mem, long with, long cmp);
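/* Usage sketch, for illustration only: the usual pattern is a
 * read-modify-CAS retry loop.  cas_add() is a hypothetical helper that adds
 * 'delta' and returns the value observed before the successful swap; per the
 * warning above, a value updated this way should not also be touched with
 * the other apr_atomic_*() operations.
 *
 *   static apr_uint32_t cas_add(volatile apr_uint32_t *mem, apr_uint32_t delta)
 *   {
 *       apr_uint32_t old;
 *       do {
 *           old = *mem;
 *       } while (apr_atomic_cas(mem, old + delta, old) != old);
 *       return old;
 *   }
 */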
/**
 * Compare the pointer's value with 'cmp'.
 * If they are the same, swap the value with 'with'.
 * @param mem pointer to the pointer
 * @param with what to swap it with
 * @param cmp the value to compare it to
 * @return the old value of the pointer
 */
void *apr_atomic_casptr(volatile void **mem, void *with, const void *cmp);
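/* Usage sketch, for illustration only: a minimal lock-free push onto a
 * singly linked list.  The node_t type and push() helper are hypothetical.
 *
 *   typedef struct node_t {
 *       struct node_t *next;
 *   } node_t;
 *
 *   static void push(node_t *volatile *head, node_t *n)
 *   {
 *       node_t *old;
 *       do {
 *           old = *head;
 *           n->next = old;
 *       } while (apr_atomic_casptr((volatile void **)head, n, old) != old);
 *   }
 */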
#else /* !DOXYGEN */

/* The following definitions provide optimized, OS-specific
 * implementations of the APR atomic functions on various
 * platforms.  Any atomic operation that isn't redefined as
 * a macro here will be declared as a function later, and
 * apr_atomic.c will provide a mutex-based default implementation.
 */

#if defined(WIN32)
#define apr_atomic_t LONG

#define apr_atomic_add(mem, val)     InterlockedExchangeAdd(mem,val)
#define apr_atomic_dec(mem)          InterlockedDecrement(mem)
#define apr_atomic_inc(mem)          InterlockedIncrement(mem)
#define apr_atomic_set(mem, val)     InterlockedExchange(mem, val)
#define apr_atomic_read(mem)         (*mem)
#define apr_atomic_cas(mem,with,cmp) InterlockedCompareExchange(mem,with,cmp)
#define apr_atomic_init(pool)        APR_SUCCESS
#define apr_atomic_casptr(mem,with,cmp) InterlockedCompareExchangePointer(mem,with,cmp)
#elif defined(NETWARE)

#define apr_atomic_t unsigned long

#define apr_atomic_add(mem, val)     atomic_add(mem,val)
#define apr_atomic_inc(mem)          atomic_inc(mem)
#define apr_atomic_set(mem, val)     (*mem = val)
#define apr_atomic_read(mem)         (*mem)
#define apr_atomic_init(pool)        APR_SUCCESS
#define apr_atomic_cas(mem,with,cmp) atomic_cmpxchg((unsigned long *)(mem),(unsigned long)(cmp),(unsigned long)(with))
int apr_atomic_dec(apr_atomic_t *mem);
void *apr_atomic_casptr(void **mem, void *with, const void *cmp);
#define APR_OVERRIDE_ATOMIC_DEC 1
#define APR_OVERRIDE_ATOMIC_CASPTR 1
inline int apr_atomic_dec(apr_atomic_t *mem)
{
    return (atomic_xchgadd(mem, 0xFFFFFFFF) - 1);
}

inline void *apr_atomic_casptr(void **mem, void *with, const void *cmp)
{
    return (void*)atomic_cmpxchg((unsigned long *)mem,
                                 (unsigned long)cmp,
                                 (unsigned long)with);
}
#elif defined(__FreeBSD__)

#define apr_atomic_t apr_uint32_t

#define apr_atomic_add(mem, val)     (atomic_add_int(mem,val),mem)
#define apr_atomic_dec(mem)          (atomic_subtract_int(mem,1),mem)
#define apr_atomic_inc(mem)          (atomic_add_int(mem,1),mem)
#define apr_atomic_set(mem, val)     (atomic_set_int(mem, val),mem)
#define apr_atomic_read(mem)         (*mem)
#elif (defined(__linux__) || defined(__EMX__)) && defined(__i386__) && !APR_FORCE_ATOMIC_GENERIC

#define apr_atomic_t apr_uint32_t
#define apr_atomic_cas(mem,with,cmp) \
({ apr_atomic_t prev; \
    asm volatile ("lock; cmpxchgl %1, %2" \
                  : "=a" (prev) \
                  : "r" (with), "m" (*(mem)), "0" (cmp) \
                  : "memory"); \
    prev; })

#define apr_atomic_add(mem, val) \
({ register apr_atomic_t last; \
   do { \
       last = *(mem); \
   } while (apr_atomic_cas((mem), last + (val), last) != last); \
})

#define apr_atomic_dec(mem) \
({ register apr_atomic_t last; \
   do { \
       last = *(mem); \
   } while (apr_atomic_cas((mem), last - 1, last) != last); \
   (last - 1); })

#define apr_atomic_inc(mem) \
({ register apr_atomic_t last; \
   do { \
       last = *(mem); \
   } while (apr_atomic_cas((mem), last + 1, last) != last); \
})

#define apr_atomic_set(mem, val) (*(mem) = val)
#define apr_atomic_read(mem)     (*(mem))
#define apr_atomic_init(pool)    APR_SUCCESS
#elif defined(__MVS__) /* OS/390 */

#define apr_atomic_t cs_t

apr_int32_t apr_atomic_add(volatile apr_atomic_t *mem, apr_int32_t val);
apr_uint32_t apr_atomic_cas(volatile apr_atomic_t *mem, apr_uint32_t swap,
                            apr_uint32_t cmp);
#define APR_OVERRIDE_ATOMIC_ADD 1
#define APR_OVERRIDE_ATOMIC_CAS 1

#define apr_atomic_inc(mem)   apr_atomic_add(mem, 1)
#define apr_atomic_dec(mem)   apr_atomic_add(mem, -1)
#define apr_atomic_init(pool) APR_SUCCESS
/* warning: the following two operations, _read and _set, are atomic
 * if the memory variables are aligned (the usual case).
 *
 * If you try really hard and manage to mis-align them, they are not
 * guaranteed to be atomic on S/390.  But then your program will blow up
 * with SIGBUS on a sparc, or with a S0C6 abend if you use the mis-aligned
 * variables with other apr_atomic_* operations on OS/390.
 */

#define apr_atomic_read(p)       (*p)
#define apr_atomic_set(mem, val) (*mem = val)

#endif /* end big if-elseif switch for platform-specifics */
/* Default implementation of the atomic API.
 * The definitions above may override some or all of the
 * atomic functions with optimized, platform-specific versions.
 * Any operation that hasn't been overridden as a macro above
 * is declared as a function here, unless APR_OVERRIDE_ATOMIC_[OPERATION]
 * is defined.  (The purpose of APR_OVERRIDE_ATOMIC_* is to allow a
 * platform to declare an apr_atomic_*() function with a different
 * signature than the default.)
 */

#if !defined(apr_atomic_t)
#define apr_atomic_t apr_uint32_t
#endif
#if !defined(apr_atomic_init) && !defined(APR_OVERRIDE_ATOMIC_INIT)
apr_status_t apr_atomic_init(apr_pool_t *p);
#endif
#if !defined(apr_atomic_read) && !defined(APR_OVERRIDE_ATOMIC_READ)
#define apr_atomic_read(p) (*(p))
#endif
#if !defined(apr_atomic_set) && !defined(APR_OVERRIDE_ATOMIC_SET)
void apr_atomic_set(volatile apr_atomic_t *mem, apr_uint32_t val);
#define APR_ATOMIC_NEED_DEFAULT_INIT 1
#endif
#if !defined(apr_atomic_add) && !defined(APR_OVERRIDE_ATOMIC_ADD)
void apr_atomic_add(volatile apr_atomic_t *mem, apr_uint32_t val);
#define APR_ATOMIC_NEED_DEFAULT_INIT 1
#endif
#if !defined(apr_atomic_inc) && !defined(APR_OVERRIDE_ATOMIC_INC)
void apr_atomic_inc(volatile apr_atomic_t *mem);
#define APR_ATOMIC_NEED_DEFAULT_INIT 1
#endif
#if !defined(apr_atomic_dec) && !defined(APR_OVERRIDE_ATOMIC_DEC)
int apr_atomic_dec(volatile apr_atomic_t *mem);
#define APR_ATOMIC_NEED_DEFAULT_INIT 1
#endif
#if !defined(apr_atomic_cas) && !defined(APR_OVERRIDE_ATOMIC_CAS)
apr_uint32_t apr_atomic_cas(volatile apr_uint32_t *mem, long with, long cmp);
#define APR_ATOMIC_NEED_DEFAULT_INIT 1
#endif
#if !defined(apr_atomic_casptr) && !defined(APR_OVERRIDE_ATOMIC_CASPTR)
#if APR_SIZEOF_VOIDP == 4
#define apr_atomic_casptr(mem, with, cmp) (void *)apr_atomic_cas((apr_uint32_t *)(mem), (long)(with), (long)(cmp))
#else
void *apr_atomic_casptr(volatile void **mem, void *with, const void *cmp);
#define APR_ATOMIC_NEED_DEFAULT_INIT 1
#endif
#endif
#ifndef APR_ATOMIC_NEED_DEFAULT_INIT
#define APR_ATOMIC_NEED_DEFAULT_INIT 0
#endif
/* If we're using the default versions of any of the atomic functions,
 * we'll need the atomic init to set up mutexes.  If a platform-specific
 * override above has replaced apr_atomic_init with a macro, that is an error.
 */
#if APR_ATOMIC_NEED_DEFAULT_INIT
#if defined(apr_atomic_init) || defined(APR_OVERRIDE_ATOMIC_INIT)
#error Platform has redefined apr_atomic_init, but other default atomics require a default apr_atomic_init
#endif
#endif /* APR_ATOMIC_NEED_DEFAULT_INIT */
#endif /* !DOXYGEN */

/** @} */

#ifdef __cplusplus
}
#endif

#endif /* !APR_ATOMIC_H */