+2020-07-01 Bruno Haible <bruno@clisp.org>
+
+ pthread-spin: Optimize fallback for GCC versions >= 4.7.
+ * lib/pthread-spin.c (pthread_spin_init, pthread_spin_lock,
+ pthread_spin_trylock, pthread_spin_unlock): Use a lock word instead of a
+ lock byte.
+
2020-07-01 Bruno Haible <bruno@clisp.org>

 pthread-spin: Add error checking.
require to link with -latomic. */
# if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7)
-/* Use GCC built-ins (available in GCC >= 4.7) that operate on the first byte
- of the lock.
+/* Use GCC built-ins (available in GCC >= 4.7) that operate on the first 32-bit
+ word of the lock.
Documentation:
<https://gcc.gnu.org/onlinedocs/gcc-4.7.0/gcc/_005f_005fatomic-Builtins.html> */
pthread_spin_init (pthread_spinlock_t *lock,
int shared_across_processes _GL_UNUSED)
{
- __atomic_store_n ((char *) lock, 0, __ATOMIC_SEQ_CST);
+ __atomic_store_n ((unsigned int *) lock, 0, __ATOMIC_SEQ_CST);
return 0;
}
pthread_spin_lock (pthread_spinlock_t *lock)
{
/* Wait until *lock becomes 0, then replace it with 1. */
- asyncsafe_spinlock_t zero;
+ unsigned int zero;
while (!(zero = 0,
- __atomic_compare_exchange_n ((char *) lock, &zero, 1, false,
+ __atomic_compare_exchange_n ((unsigned int *) lock, &zero, 1, false,
__ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)))
;
return 0;
int
pthread_spin_trylock (pthread_spinlock_t *lock)
{
- asyncsafe_spinlock_t zero;
+ unsigned int zero;
if (!(zero = 0,
- __atomic_compare_exchange_n ((char *) lock, &zero, 1, false,
+ __atomic_compare_exchange_n ((unsigned int *) lock, &zero, 1, false,
__ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)))
return EBUSY;
return 0;
pthread_spin_unlock (pthread_spinlock_t *lock)
{
/* If *lock is 1, then replace it with 0. */
- asyncsafe_spinlock_t one = 1;
- if (!__atomic_compare_exchange_n ((char *) lock, &one, 0, false,
+ unsigned int one = 1;
+ if (!__atomic_compare_exchange_n ((unsigned int *) lock, &one, 0, false,
__ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
abort ();
return 0;
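
For reference, the code paths touched by this diff combine into the following
self-contained sketch of the lock-word approach. It assumes a GCC >= 4.7
compiler and a lock object at least as large and as aligned as an unsigned int
(which the gnulib replacement type provides); the my_spinlock_t typedef and the
my_spin_* names are placeholders for this illustration, not part of gnulib.

#include <errno.h>
#include <stdbool.h>
#include <stdlib.h>

typedef unsigned int my_spinlock_t;  /* placeholder for the real lock type */

static int
my_spin_init (my_spinlock_t *lock)
{
  /* Start out unlocked.  */
  __atomic_store_n (lock, 0, __ATOMIC_SEQ_CST);
  return 0;
}

static int
my_spin_lock (my_spinlock_t *lock)
{
  /* Busy-wait until the compare-and-swap from 0 to 1 succeeds.  The expected
     value must be reset to 0 on each iteration, because a failed
     __atomic_compare_exchange_n overwrites it with the value actually found
     in *lock.  */
  unsigned int zero;
  while (!(zero = 0,
           __atomic_compare_exchange_n (lock, &zero, 1, false,
                                        __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)))
    ;
  return 0;
}

static int
my_spin_trylock (my_spinlock_t *lock)
{
  /* One attempt only: report EBUSY if the lock was not 0.  */
  unsigned int zero = 0;
  if (!__atomic_compare_exchange_n (lock, &zero, 1, false,
                                    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
    return EBUSY;
  return 0;
}

static int
my_spin_unlock (my_spinlock_t *lock)
{
  /* The lock is expected to be held (value 1); anything else is a usage
     error, reported by aborting, as in the gnulib code.  */
  unsigned int one = 1;
  if (!__atomic_compare_exchange_n (lock, &one, 0, false,
                                    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
    abort ();
  return 0;
}

In all three compare-and-swap calls, __atomic_compare_exchange_n atomically
compares *lock with the expected value and, on success, stores the new value;
on failure it writes the observed value back into the expected variable, which
is why the busy-wait loop reassigns zero on every iteration.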