/* Spin locks in multithreaded situations.
   Copyright (C) 2010-2020 Free Software Foundation, Inc.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2, or (at your option)
   any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, see <https://www.gnu.org/licenses/>.  */

/* Written by Paul Eggert, 2010, and Bruno Haible <bruno@clisp.org>, 2019.  */
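
/* Illustrative usage only (an assumption about callers, not taken from this
   file): the functions below implement the standard POSIX spin lock API, so
   a caller would typically use them like this:

     pthread_spinlock_t lock;
     pthread_spin_init (&lock, PTHREAD_PROCESS_PRIVATE);
     ...
     pthread_spin_lock (&lock);
     ... short critical section ...
     pthread_spin_unlock (&lock);
     ...
     pthread_spin_destroy (&lock);
*/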

#include <config.h>

/* Specification.  */
#include <pthread.h>

#include <errno.h>    /* for EBUSY */
#include <stdbool.h>  /* for 'false' */
#include <stdlib.h>   /* for abort */

#if (defined _WIN32 && ! defined __CYGWIN__) && USE_WINDOWS_THREADS
# include "windows-spin.h"
#endif

#if (defined _WIN32 && ! defined __CYGWIN__) && USE_WINDOWS_THREADS
/* Use Windows threads.  */

int
pthread_spin_init (pthread_spinlock_t *lock,
                   int shared_across_processes _GL_UNUSED)
{
  glwthread_spin_init (lock);
  return 0;
}

int
pthread_spin_lock (pthread_spinlock_t *lock)
{
  return glwthread_spin_lock (lock);
}

int
pthread_spin_trylock (pthread_spinlock_t *lock)
{
  return glwthread_spin_trylock (lock);
}

int
pthread_spin_unlock (pthread_spinlock_t *lock)
{
  return glwthread_spin_unlock (lock);
}

int
pthread_spin_destroy (pthread_spinlock_t *lock)
{
  return glwthread_spin_destroy (lock);
}

#elif HAVE_PTHREAD_H
/* Provide workarounds for POSIX threads.  */

/* We don't use the C11 <stdatomic.h> (available in GCC >= 4.9) because it
   would require linking with -latomic.  */
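
/* For illustration only (kept inside this comment, not compiled): a rough
   sketch of how the same lock could be written against the C11 <stdatomic.h>
   API.  The helper names spin_lock_c11/spin_unlock_c11 are made up for this
   sketch.

     #include <stdatomic.h>

     static void
     spin_lock_c11 (volatile atomic_flag *lock)
     {
       while (atomic_flag_test_and_set_explicit (lock, memory_order_acquire))
         ;
     }

     static void
     spin_unlock_c11 (volatile atomic_flag *lock)
     {
       atomic_flag_clear_explicit (lock, memory_order_release);
     }

   On targets where the compiler lowers <stdatomic.h> operations to libatomic
   calls, this would add the -latomic link dependency mentioned above, which
   the GCC/clang built-ins used below avoid.  */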

# if (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7) \
      || __clang_major__ > 3 || (__clang_major__ == 3 && __clang_minor__ >= 1)) \
     && !defined __ibmxl__
/* Use GCC built-ins (available in GCC >= 4.7 and clang >= 3.1) that operate on
   the first byte of the lock.
   <https://gcc.gnu.org/onlinedocs/gcc-4.7.0/gcc/_005f_005fatomic-Builtins.html>  */

#  if 1
/* An implementation that verifies the unlocks.  */

int
pthread_spin_init (pthread_spinlock_t *lock,
                   int shared_across_processes _GL_UNUSED)
{
  __atomic_store_n ((unsigned int *) lock, 0, __ATOMIC_SEQ_CST);
  return 0;
}

int
pthread_spin_lock (pthread_spinlock_t *lock)
{
  /* Wait until *lock becomes 0, then replace it with 1.  */
  unsigned int zero;
  while (!(zero = 0,
           __atomic_compare_exchange_n ((unsigned int *) lock, &zero, 1, false,
                                        __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)))
    ;
  return 0;
}

int
pthread_spin_trylock (pthread_spinlock_t *lock)
{
  unsigned int zero;
  if (!(zero = 0,
        __atomic_compare_exchange_n ((unsigned int *) lock, &zero, 1, false,
                                     __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)))
    return EBUSY;
  return 0;
}

int
pthread_spin_unlock (pthread_spinlock_t *lock)
{
  /* If *lock is 1, then replace it with 0.  */
  unsigned int one = 1;
  if (!__atomic_compare_exchange_n ((unsigned int *) lock, &one, 0, false,
                                    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
    /* The lock was not held; this variant verifies the unlocks.  */
    abort ();
  return 0;
}

#  else
/* An implementation that is a little bit more optimized, but does not verify
   the unlocks.  */

int
pthread_spin_init (pthread_spinlock_t *lock,
                   int shared_across_processes _GL_UNUSED)
{
  __atomic_clear (lock, __ATOMIC_SEQ_CST);
  return 0;
}

int
pthread_spin_lock (pthread_spinlock_t *lock)
{
  while (__atomic_test_and_set (lock, __ATOMIC_SEQ_CST))
    ;
  return 0;
}

int
pthread_spin_trylock (pthread_spinlock_t *lock)
{
  if (__atomic_test_and_set (lock, __ATOMIC_SEQ_CST))
    return EBUSY;
  return 0;
}

int
pthread_spin_unlock (pthread_spinlock_t *lock)
{
  __atomic_clear (lock, __ATOMIC_SEQ_CST);
  return 0;
}

#  endif

int
pthread_spin_destroy (pthread_spinlock_t *lock)
{
  return 0;
}

# elif (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 1) \
        || __clang_major__ >= 3) \
       && !defined __ibmxl__
/* Use GCC built-ins (available in GCC >= 4.1 and clang >= 3.0).
   <https://gcc.gnu.org/onlinedocs/gcc-4.1.2/gcc/Atomic-Builtins.html>  */

int
pthread_spin_init (pthread_spinlock_t *lock,
                   int shared_across_processes _GL_UNUSED)
{
  * (volatile unsigned int *) lock = 0;
  __sync_synchronize ();
  return 0;
}

int
pthread_spin_lock (pthread_spinlock_t *lock)
{
  /* Wait until *lock becomes 0, then replace it with 1.  */
  while (__sync_val_compare_and_swap ((unsigned int *) lock, 0, 1) != 0)
    ;
  return 0;
}

int
pthread_spin_trylock (pthread_spinlock_t *lock)
{
  if (__sync_val_compare_and_swap ((unsigned int *) lock, 0, 1) != 0)
    return EBUSY;
  return 0;
}

int
pthread_spin_unlock (pthread_spinlock_t *lock)
{
  /* If *lock is 1, then replace it with 0.  */
  if (__sync_val_compare_and_swap ((unsigned int *) lock, 1, 0) != 1)
    /* The lock was not held; this variant verifies the unlocks.  */
    abort ();
  return 0;
}

int
pthread_spin_destroy (pthread_spinlock_t *lock)
{
  return 0;
}

# else
/* Emulate a spin lock through a mutex.  */

int
pthread_spin_init (pthread_spinlock_t *lock,
                   int shared_across_processes _GL_UNUSED)
{
  return pthread_mutex_init (lock, NULL);
}

int
pthread_spin_lock (pthread_spinlock_t *lock)
{
  return pthread_mutex_lock (lock);
}

int
pthread_spin_trylock (pthread_spinlock_t *lock)
{
  return pthread_mutex_trylock (lock);
}

int
pthread_spin_unlock (pthread_spinlock_t *lock)
{
  return pthread_mutex_unlock (lock);
}

int
pthread_spin_destroy (pthread_spinlock_t *lock)
{
  return pthread_mutex_destroy (lock);
}

# endif

#else
/* Provide a dummy implementation for single-threaded applications.  */

int
pthread_spin_init (pthread_spinlock_t *lock _GL_UNUSED,
                   int shared_across_processes _GL_UNUSED)
{
  return 0;
}

int
pthread_spin_lock (pthread_spinlock_t *lock _GL_UNUSED)
{
  return 0;
}

int
pthread_spin_trylock (pthread_spinlock_t *lock _GL_UNUSED)
{
  return 0;
}

int
pthread_spin_unlock (pthread_spinlock_t *lock _GL_UNUSED)
{
  return 0;
}

int
pthread_spin_destroy (pthread_spinlock_t *lock _GL_UNUSED)
{
  return 0;
}

#endif