/* spinlock.h: 32-bit Sparc spinlock support.
 *
 * Copyright (C) 1997 David S. Miller (davem@caip.rutgers.edu)
 */
#ifndef __SPARC_SPINLOCK_H
#define __SPARC_SPINLOCK_H

#ifndef __ASSEMBLY__
/* A spinlock is held while its (single) lock byte is non-zero. */
#define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0)
/* Spin (politely, via cpu_relax) until @lock is observed unlocked. */
#define arch_spin_unlock_wait(lock) \
	do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
18 static inline void arch_spin_lock(arch_spinlock_t
*lock
)
22 "ldstub [%0], %%g2\n\t"
23 "orcc %%g2, 0x0, %%g0\n\t"
25 " ldub [%0], %%g2\n\t"
28 "orcc %%g2, 0x0, %%g0\n\t"
30 " ldub [%0], %%g2\n\t"
35 : "g2", "memory", "cc");
38 static inline int arch_spin_trylock(arch_spinlock_t
*lock
)
41 __asm__
__volatile__("ldstub [%1], %0"
48 static inline void arch_spin_unlock(arch_spinlock_t
*lock
)
50 __asm__
__volatile__("stb %%g0, [%0]" : : "r" (lock
) : "memory");
53 static inline void __arch_read_lock(arch_rwlock_t
*rw
)
55 register arch_rwlock_t
*lp
asm("g1");
59 "call ___rw_read_enter\n\t"
60 " ldstub [%%g1 + 3], %%g2\n"
63 : "g2", "g4", "memory", "cc");
/* IRQ-safe read lock: the out-of-line lock helpers are not interrupt
 * safe, so local interrupts are disabled around the acquisition.
 */
#define arch_read_lock(lock) \
do {	unsigned long flags; \
	local_irq_save(flags); \
	__arch_read_lock(lock); \
	local_irq_restore(flags); \
} while(0)
73 static inline void __arch_read_unlock(arch_rwlock_t
*rw
)
75 register arch_rwlock_t
*lp
asm("g1");
79 "call ___rw_read_exit\n\t"
80 " ldstub [%%g1 + 3], %%g2\n"
83 : "g2", "g4", "memory", "cc");
/* IRQ-safe read unlock — mirrors arch_read_lock(). */
#define arch_read_unlock(lock) \
do {	unsigned long flags; \
	local_irq_save(flags); \
	__arch_read_unlock(lock); \
	local_irq_restore(flags); \
} while(0)
93 static inline void arch_write_lock(arch_rwlock_t
*rw
)
95 register arch_rwlock_t
*lp
asm("g1");
99 "call ___rw_write_enter\n\t"
100 " ldstub [%%g1 + 3], %%g2\n"
103 : "g2", "g4", "memory", "cc");
104 *(volatile __u32
*)&lp
->lock
= ~0U;
107 static inline int arch_write_trylock(arch_rwlock_t
*rw
)
111 __asm__
__volatile__("ldstub [%1 + 3], %0"
117 val
= rw
->lock
& ~0xff;
119 ((volatile u8
*)&rw
->lock
)[3] = 0;
121 *(volatile u32
*)&rw
->lock
= ~0U;
127 static inline int __arch_read_trylock(arch_rwlock_t
*rw
)
129 register arch_rwlock_t
*lp
asm("g1");
130 register int res
asm("o0");
132 __asm__
__volatile__(
134 "call ___rw_read_try\n\t"
135 " ldstub [%%g1 + 3], %%g2\n"
138 : "g2", "g4", "memory", "cc");
/* IRQ-safe read trylock; evaluates to the helper's result
 * (non-zero on success) via a GCC statement expression.
 */
#define arch_read_trylock(lock) \
({	unsigned long flags; \
	int res; \
	local_irq_save(flags); \
	res = __arch_read_trylock(lock); \
	local_irq_restore(flags); \
	res; \
})
/* Release the write lock: clear the whole lock word. */
#define arch_write_unlock(rw)	do { (rw)->lock = 0; } while(0)
/* The flags-taking variants ignore the saved flags on sparc32. */
#define arch_spin_lock_flags(lock, flags)	arch_spin_lock(lock)
#define arch_read_lock_flags(rw, flags)	arch_read_lock(rw)
#define arch_write_lock_flags(rw, flags)	arch_write_lock(rw)
/* Contention back-off hooks: just yield the CPU pipeline. */
#define arch_spin_relax(lock)	cpu_relax()
#define arch_read_relax(lock)	cpu_relax()
#define arch_write_relax(lock)	cpu_relax()
/* Readers are blocked only while the low (writer) byte is set;
 * writers need the entire lock word to be clear.
 */
#define arch_read_can_lock(rw) (!((rw)->lock & 0xff))
#define arch_write_can_lock(rw) (!(rw)->lock)
#endif /* !(__ASSEMBLY__) */

#endif /* __SPARC_SPINLOCK_H */