#ifndef _ALPHA_SPINLOCK_H
#define _ALPHA_SPINLOCK_H

#include <linux/config.h>
#include <asm/system.h>
#include <linux/kernel.h>
#include <asm/current.h>

/*
 * Simple spin lock operations.  There are two variants, one clears IRQ's
 * on the local processor, one does not.
 *
 * We make no fairness assumptions. They have a cost.
 */
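
/*
 * Illustrative usage sketch (not part of the original header; the lock
 * name is hypothetical): callers normally go through the generic
 * wrappers in <linux/spinlock.h> rather than the _raw_* primitives
 * declared below.
 *
 *	static spinlock_t my_lock = SPIN_LOCK_UNLOCKED;
 *
 *	spin_lock(&my_lock);
 *	... critical section ...
 *	spin_unlock(&my_lock);
 */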

typedef struct {
	volatile unsigned int lock /*__attribute__((aligned(32))) */;
#ifdef CONFIG_DEBUG_SPINLOCK
	int on_cpu;
	int line_no;
	void *previous;
	struct task_struct * task;
	const char *base_file;
#endif
} spinlock_t;

#ifdef CONFIG_DEBUG_SPINLOCK
#define SPIN_LOCK_UNLOCKED	(spinlock_t) {0, -1, 0, NULL, NULL, NULL}
#define spin_lock_init(x)	\
	((x)->lock = 0, (x)->on_cpu = -1, (x)->previous = NULL, (x)->task = NULL)
#else
#define SPIN_LOCK_UNLOCKED	(spinlock_t) { 0 }
#define spin_lock_init(x)	((x)->lock = 0)
#endif

#define spin_is_locked(x)	((x)->lock != 0)
#define spin_unlock_wait(x)	({ do { barrier(); } while ((x)->lock); })
#define _raw_spin_lock_flags(lock, flags) _raw_spin_lock(lock)

#ifdef CONFIG_DEBUG_SPINLOCK
extern void _raw_spin_unlock(spinlock_t * lock);
extern void debug_spin_lock(spinlock_t * lock, const char *, int);
extern int debug_spin_trylock(spinlock_t * lock, const char *, int);

#define _raw_spin_lock(LOCK)	debug_spin_lock(LOCK, __BASE_FILE__, __LINE__)
#define _raw_spin_trylock(LOCK)	debug_spin_trylock(LOCK, __BASE_FILE__, __LINE__)

#define spin_lock_own(LOCK, LOCATION)					\
do {									\
	if (!((LOCK)->lock && (LOCK)->on_cpu == smp_processor_id()))	\
		printk("%s: called on %d from %p but lock %s on %d\n",	\
		       LOCATION, smp_processor_id(),			\
		       __builtin_return_address(0),			\
		       (LOCK)->lock ? "taken" : "freed", (LOCK)->on_cpu); \
} while (0)
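
/*
 * Illustrative sketch (an assumption, not original text; names are
 * hypothetical): debug code can use this to assert that the current CPU
 * really owns a lock before touching the data it protects, e.g.
 *
 *	spin_lock_own(&my_lock, "my_function");
 */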
#else
static inline void _raw_spin_unlock(spinlock_t * lock)
{
	mb();
	lock->lock = 0;
}

static inline void _raw_spin_lock(spinlock_t * lock)
{
	long tmp;

	/* Use sub-sections to put the actual loop at the end
	   of this object file's text section so as to perfect
	   branch prediction.  */
	__asm__ __volatile__(
	"1:	ldl_l	%0,%1\n"	/* load-locked the lock word */
	"	blbs	%0,2f\n"	/* low bit set: already held, go spin */
	"	or	%0,1,%0\n"	/* set the lock bit */
	"	stl_c	%0,%1\n"	/* store-conditional; %0 becomes 0 on failure */
	"	beq	%0,2f\n"	/* lost the reservation, go spin */
	"	mb\n"			/* acquire barrier */
	".subsection 2\n"
	"2:	ldl	%0,%1\n"	/* spin with plain loads until the lock looks free */
	"	blbs	%0,2b\n"
	"	br	1b\n"		/* then retry the locked sequence */
	".previous"
	: "=&r" (tmp), "=m" (lock->lock)
	: "m"(lock->lock) : "memory");
}

static inline int _raw_spin_trylock(spinlock_t *lock)
{
	return !test_and_set_bit(0, &lock->lock);
}

#define spin_lock_own(LOCK, LOCATION)	((void)0)
#endif /* CONFIG_DEBUG_SPINLOCK */
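
/*
 * Note (not in the original source; lock name hypothetical):
 * _raw_spin_trylock() returns non-zero only when it actually took the
 * lock, so a caller that must not spin can do
 *
 *	if (spin_trylock(&my_lock)) {
 *		... critical section ...
 *		spin_unlock(&my_lock);
 *	}
 */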

/***********************************************************/

typedef struct {
	volatile unsigned int write_lock:1, read_counter:31;
} /*__attribute__((aligned(32)))*/ rwlock_t;
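
/*
 * Lock word encoding, as implemented by the routines below: 0 means
 * unlocked; a writer stores 1, setting the low write_lock bit; each
 * reader subtracts 2, leaving the low bit clear while making the word
 * non-zero.  A write lock therefore waits for the whole word to become
 * zero, while a read lock only waits for the write bit to clear.
 */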

#define RW_LOCK_UNLOCKED (rwlock_t) { 0, 0 }

#define rwlock_init(x)	do { *(x) = RW_LOCK_UNLOCKED; } while(0)
#define rwlock_is_locked(x)	(*(volatile int *)(x) != 0)
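
/*
 * Illustrative usage sketch (not part of the original header; the lock
 * name is hypothetical): callers normally use the generic
 * read_lock()/write_lock() wrappers from <linux/spinlock.h> rather than
 * the _raw_* routines below.
 *
 *	static rwlock_t my_rwlock = RW_LOCK_UNLOCKED;
 *
 *	read_lock(&my_rwlock);
 *	... several readers may hold the lock at once ...
 *	read_unlock(&my_rwlock);
 */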

#ifdef CONFIG_DEBUG_RWLOCK
extern void _raw_write_lock(rwlock_t * lock);
extern void _raw_read_lock(rwlock_t * lock);
#else
static inline void _raw_write_lock(rwlock_t * lock)
{
	long regx;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"	/* load-locked the lock word */
	"	bne	%1,6f\n"	/* readers or a writer present, go spin */
	"	or	$31,1,%1\n"	/* set the write_lock bit */
	"	stl_c	%1,%0\n"	/* store-conditional; %1 becomes 0 on failure */
	"	beq	%1,6f\n"
	"	mb\n"			/* acquire barrier */
	".subsection 2\n"
	"6:	ldl	%1,%0\n"	/* spin with plain loads until the word is 0 */
	"	bne	%1,6b\n"
	"	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}

static inline void _raw_read_lock(rwlock_t * lock)
{
	long regx;

	__asm__ __volatile__(
135 "1: ldl_l %1,%0\n"
136 " blbs %1,6f\n"
137 " subl %1,2,%1\n"
138 " stl_c %1,%0\n"
139 " beq %1,6f\n"
140 "4: mb\n"
141 ".subsection 2\n"
142 "6: ldl %1,%0\n"
143 " blbs %1,6b\n"
144 " br 1b\n"
145 ".previous"
146 : "=m" (*lock), "=&r" (regx)
147 : "m" (*lock) : "memory");

#endif /* CONFIG_DEBUG_RWLOCK */

static inline int _raw_write_trylock(rwlock_t * lock)
{
	long regx;
	int success;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"	/* load-locked the lock word */
	"	lda	%2,0\n"		/* assume failure */
	"	bne	%1,2f\n"	/* already held or readers present, give up */
	"	or	$31,1,%1\n"	/* set the write_lock bit */
	"	stl_c	%1,%0\n"	/* store-conditional; %1 becomes 0 on failure */
	"	beq	%1,6f\n"	/* reservation lost, retry from the top */
	"	lda	%2,1\n"		/* success */
	"2:	mb\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx), "=&r" (success)
	: "m" (*lock) : "memory");

	return success;
}
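
/*
 * Note (not in the original source): the trylock returns 1 when the write
 * lock was taken and 0 when the lock word was already non-zero; unlike
 * _raw_write_lock() it never spins waiting for the lock to be released.
 */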

static inline void _raw_write_unlock(rwlock_t * lock)
{
	mb();
	*(volatile int *)lock = 0;
}

static inline void _raw_read_unlock(rwlock_t * lock)
{
	long regx;
	__asm__ __volatile__(
	"	mb\n"			/* release barrier before dropping the lock */
	"1:	ldl_l	%1,%0\n"
	"	addl	%1,2,%1\n"	/* drop one reader */
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"	/* store-conditional failed, retry */
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}

#endif /* _ALPHA_SPINLOCK_H */