Simple spinlock support
[AROS.git] / arch/arm-native/kernel/spinlock.c
/*
    Copyright © 2015, The AROS Development Team. All rights reserved.
    $Id$
*/
#include <aros/types/spinlock_s.h>
#include <aros/kernel.h>
#include <aros/libcall.h>

#include <asm/arm/cpu.h>

#include <kernel_base.h>

#include <proto/kernel.h>
AROS_LH2(spinlock_t *, KrnSpinLock,
    AROS_LHA(spinlock_t *, lock, A0),
    AROS_LHA(ULONG, mode, D0),
    struct KernelBase *, KernelBase, 43, Kernel)
{
    AROS_LIBFUNC_INIT
    unsigned long lock_value, result;
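
    /*
     * Lock word encoding, as used by the assembly below: 0 means the lock is
     * free, 0x80000000 (top bit set, i.e. negative) means it is held for
     * writing, and any positive value is the count of concurrent readers.
     */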
    if (mode == SPINLOCK_MODE_WRITE)
    {
        asm volatile(
28 "1: ldrex %0, [%1] \n\t" // Load the lock value and gain exclusive lock to memory
29 " teq %0, #0 \n\t" // is the value 0? It means the lock was free
30 " wfene \n\t" // Wait for Event if lock was not free
31 " strexeq %0, %2, [%1] \n\t" // Store value to memory and check if it succeeded
32 " teq %0, #0 \n\t" // Test if write succeeded
33 " bne 1b \n\t" // Try again if locking failed
34 : "=&r"(lock_value)
35 : "r" (&lock->lock), "r"(0x80000000)
36 : "cc"
    }
    else
    {
        asm volatile(
42 "1: ldrex %0, [%2] \n\t" // Load lock value and gain exclusive lock to memory
43 " adds %0, %0, #1 \n\t" // Increase the lock value and update conditional bits
44 " wfemi \n\t" // Wait for event if lock value was negative
45 " strexpl %1, %0, [%2] \n\t" // Store value to memory if positive and check result
46 " rsbpls %0, %1, #0 \n\t" // Reverse substract and update conditionals: if strex write was
47 // successful, %0 contains 0. #0 - %0 clears N flag. If write failed
48 // %0 contains 1. #0 - %0 sets N flag (value 0xffffffff).
49 " bmi 1b \n\t" // Try again if N flag is set (because of locok value or write failure
50 : "=&r"(lock_value), "=&r"(result)
51 : "r"(&lock->lock)
52 : "cc"
    dmb();
    return lock;

    AROS_LIBFUNC_EXIT
}
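
/*
 * A minimal usage sketch, not part of the file above. Only KrnSpinLock()
 * and SPINLOCK_MODE_WRITE are defined here; the matching KrnSpinUnLock()
 * release call, a KrnSpinInit() initializer and a SPINLOCK_MODE_READ
 * constant are assumed to exist elsewhere in kernel.resource.
 *
 *     static spinlock_t sl;                   // set up once with KrnSpinInit(&sl) (assumed)
 *
 *     KrnSpinLock(&sl, SPINLOCK_MODE_WRITE);  // exclusive: lock word becomes 0x80000000
 *     // ... modify shared state ...
 *     KrnSpinUnLock(&sl);                     // assumed release call
 *
 *     KrnSpinLock(&sl, SPINLOCK_MODE_READ);   // shared: reader count is incremented
 *     // ... read shared state ...
 *     KrnSpinUnLock(&sl);
 */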