#ifndef _ALPHA_SPINLOCK_H
#define _ALPHA_SPINLOCK_H

#include <linux/config.h>
#include <asm/system.h>
#include <linux/kernel.h>
#include <asm/current.h>
/*
 * Simple spin lock operations.  There are two variants, one clears IRQs
 * on the local processor, one does not.
 *
 * We make no fairness assumptions.  They have a cost.
 */
typedef struct {
	volatile unsigned int lock /*__attribute__((aligned(32))) */;
#ifdef CONFIG_DEBUG_SPINLOCK
	int on_cpu;
	int line_no;
	void *previous;
	struct task_struct * task;
	const char *base_file;
#endif
} spinlock_t;
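/*
 * The CONFIG_DEBUG_SPINLOCK fields record which CPU took the lock and
 * from where, which is why the debug initializers below have to reset
 * more than just the lock word.
 */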
#ifdef CONFIG_DEBUG_SPINLOCK
#define SPIN_LOCK_UNLOCKED	(spinlock_t) {0, -1, 0, NULL, NULL, NULL}
#define spin_lock_init(x)						\
	((x)->lock = 0, (x)->on_cpu = -1, (x)->previous = NULL, (x)->task = NULL)
#else
#define SPIN_LOCK_UNLOCKED	(spinlock_t) { 0 }
#define spin_lock_init(x)	((x)->lock = 0)
#endif
#define spin_is_locked(x)	((x)->lock != 0)
#define spin_unlock_wait(x)	({ do { barrier(); } while ((x)->lock); })
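/* The saved irq flags are not needed to take the lock on Alpha, so the
   _flags variant simply ignores them.  */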
#define _raw_spin_lock_flags(lock, flags) _raw_spin_lock(lock)
#ifdef CONFIG_DEBUG_SPINLOCK
extern void _raw_spin_unlock(spinlock_t * lock);
extern void debug_spin_lock(spinlock_t * lock, const char *, int);
extern int debug_spin_trylock(spinlock_t * lock, const char *, int);

#define _raw_spin_lock(LOCK) debug_spin_lock(LOCK, __BASE_FILE__, __LINE__)
#define _raw_spin_trylock(LOCK) debug_spin_trylock(LOCK, __BASE_FILE__, __LINE__)
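/* Debug-only sanity check: warn if the lock is not currently held by
   this CPU.  It only prints a diagnostic, it does not enforce anything.  */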
#define spin_lock_own(LOCK, LOCATION)					\
do {									\
	if (!((LOCK)->lock && (LOCK)->on_cpu == smp_processor_id()))	\
		printk("%s: called on %d from %p but lock %s on %d\n",	\
		       LOCATION, smp_processor_id(),			\
		       __builtin_return_address(0),			\
		       (LOCK)->lock ? "taken" : "freed", (LOCK)->on_cpu); \
} while (0)
#else
static inline void _raw_spin_unlock(spinlock_t * lock)
{
	mb();
	lock->lock = 0;
}
static inline void _raw_spin_lock(spinlock_t * lock)
{
	long tmp;

	/* Use sub-sections to put the actual loop at the end
	   of this object file's text section so as to perfect
	   branch prediction.  */
	__asm__ __volatile__(
	"1:	ldl_l	%0,%1\n"
	"	bne	%0,2f\n"
	"	lda	%0,1\n"
	"	stl_c	%0,%1\n"
	"	beq	%0,2f\n"
	"	mb\n"
	".subsection 2\n"
	"2:	ldl	%0,%1\n"
	"	bne	%0,2b\n"
	"	br	1b\n"
	".previous"
	: "=&r" (tmp), "=m" (lock->lock)
	: "m"(lock->lock) : "memory");
}
static inline int _raw_spin_trylock(spinlock_t *lock)
{
	return !test_and_set_bit(0, &lock->lock);
}
#define spin_lock_own(LOCK, LOCATION)	((void)0)
#endif /* CONFIG_DEBUG_SPINLOCK */
/***********************************************************/
typedef struct {
	volatile unsigned int write_lock:1, read_counter:31;
} /*__attribute__((aligned(32)))*/ rwlock_t;
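/* Bit 0 is the writer flag; the reader count lives in the upper 31 bits,
   so readers step the lock word in units of 2 and never touch bit 0.  */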
#define RW_LOCK_UNLOCKED (rwlock_t) { 0, 0 }

#define rwlock_init(x)		do { *(x) = RW_LOCK_UNLOCKED; } while(0)
#define rwlock_is_locked(x)	(*(volatile int *)(x) != 0)
#ifdef CONFIG_DEBUG_RWLOCK
extern void _raw_write_lock(rwlock_t * lock);
extern void _raw_read_lock(rwlock_t * lock);
#else
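/* A writer may take the lock only when the whole word is zero, i.e. no
   writer and no readers are present.  */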
static inline void _raw_write_lock(rwlock_t * lock)
{
	long regx;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	bne	%1,6f\n"
	"	lda	%1,1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	"	mb\n"
	".subsection 2\n"
	"6:	ldl	%1,%0\n"
	"	bne	%1,6b\n"
	"	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}
static inline void _raw_read_lock(rwlock_t * lock)
{
	long regx;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	blbs	%1,6f\n"
	"	subl	%1,2,%1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	"	mb\n"
	".subsection 2\n"
	"6:	ldl	%1,%0\n"
	"	blbs	%1,6b\n"
	"	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}
#endif /* CONFIG_DEBUG_RWLOCK */
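/* Try to take the write lock without spinning: a busy lock makes the
   attempt fail immediately, only a failed store-conditional is retried.  */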
static inline int _raw_write_trylock(rwlock_t * lock)
{
	long regx;
	int success;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	lda	%2,0\n"
	"	bne	%1,2f\n"
	"	lda	%2,1\n"
	"	stl_c	%2,%0\n"
	"	beq	%2,6f\n"
	"2:	mb\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx), "=&r" (success)
	: "m" (*lock) : "memory");

	return success;
}
static inline void _raw_write_unlock(rwlock_t * lock)
{
	mb();
	*(volatile int *)lock = 0;
}
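/* Readers leave by atomically undoing the count adjustment made in
   _raw_read_lock; the leading mb orders the critical section before
   the lock word is updated.  */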
static inline void _raw_read_unlock(rwlock_t * lock)
{
	long regx;
	__asm__ __volatile__(
	"	mb\n"
	"1:	ldl_l	%1,%0\n"
	"	addl	%1,2,%1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}
#endif /* _ALPHA_SPINLOCK_H */