Import 2.3.18pre1
[davej-history.git] / include / asm-sparc64 / semaphore.h
blob e119514c4cb9b0c3ad4ca9555b7d3705832e7bcd
1 #ifndef _SPARC64_SEMAPHORE_H
2 #define _SPARC64_SEMAPHORE_H
4 /* These are actually reasonable on the V9. */
5 #ifdef __KERNEL__
7 #include <asm/atomic.h>
8 #include <asm/system.h>
10 struct semaphore {
11 atomic_t count;
12 atomic_t waking;
13 wait_queue_head_t wait;
14 #if WAITQUEUE_DEBUG
15 long __magic;
16 #endif
/*
 * Static initializers.  __SEM_DEBUG_INIT() appends the __magic member
 * initializer only when waitqueue debugging is compiled in; otherwise
 * it expands to nothing so the brace-list still matches the struct.
 */
#if WAITQUEUE_DEBUG
# define __SEM_DEBUG_INIT(name) \
		, (long)&(name).__magic
#else
# define __SEM_DEBUG_INIT(name)
#endif

#define __SEMAPHORE_INITIALIZER(name,count) \
{ ATOMIC_INIT(count), ATOMIC_INIT(0), __WAIT_QUEUE_HEAD_INITIALIZER((name).wait) \
	__SEM_DEBUG_INIT(name) }

#define __MUTEX_INITIALIZER(name) \
	__SEMAPHORE_INITIALIZER(name,1)

#define __DECLARE_SEMAPHORE_GENERIC(name,count) \
	struct semaphore name = __SEMAPHORE_INITIALIZER(name,count)

/* A mutex is just a semaphore with an initial count of 1 (0 = locked). */
#define DECLARE_MUTEX(name) __DECLARE_SEMAPHORE_GENERIC(name,1)
#define DECLARE_MUTEX_LOCKED(name) __DECLARE_SEMAPHORE_GENERIC(name,0)
39 extern inline void sema_init (struct semaphore *sem, int val)
41 atomic_set(&sem->count, val);
42 atomic_set(&sem->waking, 0);
43 init_waitqueue_head(&sem->wait);
44 #if WAITQUEUE_DEBUG
45 sem->__magic = (long)&sem->__magic;
46 #endif
/* Initialize a semaphore for use as an unlocked mutex (count = 1). */
static inline void init_MUTEX (struct semaphore *sem)
{
	sema_init(sem, 1);
}
/* Initialize a semaphore for use as an already-locked mutex (count = 0). */
static inline void init_MUTEX_LOCKED (struct semaphore *sem)
{
	sema_init(sem, 0);
}
/*
 * Out-of-line slow paths, defined elsewhere in the kernel.  The inline
 * fast paths below call these (via the "i" asm operands) only when the
 * semaphore is contended.
 */
extern void __down(struct semaphore * sem);
extern int __down_interruptible(struct semaphore * sem);
extern int __down_trylock(struct semaphore * sem);
extern void __up(struct semaphore * sem);
64 extern __inline__ void down(struct semaphore * sem)
66 #if WAITQUEUE_DEBUG
67 CHECK_MAGIC(sem->__magic);
68 #endif
69 __asm__ __volatile__("
70 1: lduw [%0], %%g5
71 sub %%g5, 1, %%g7
72 cas [%0], %%g5, %%g7
73 cmp %%g5, %%g7
74 bne,pn %%icc, 1b
75 cmp %%g7, 1
76 bl,pn %%icc, 3f
77 membar #StoreStore
79 .subsection 2
80 3: mov %0, %%g5
81 save %%sp, -160, %%sp
82 mov %%g1, %%l1
83 mov %%g2, %%l2
84 mov %%g3, %%l3
85 call %1
86 mov %%g5, %%o0
87 mov %%l1, %%g1
88 mov %%l2, %%g2
89 ba,pt %%xcc, 2b
90 restore %%l3, %%g0, %%g3
91 .previous\n"
92 : : "r" (__atomic_fool_gcc(sem)), "i" (__down)
93 : "g5", "g7", "memory", "cc");
96 extern __inline__ int down_interruptible(struct semaphore *sem)
98 int ret = 0;
100 #if WAITQUEUE_DEBUG
101 CHECK_MAGIC(sem->__magic);
102 #endif
103 __asm__ __volatile__("
104 1: lduw [%2], %%g5
105 sub %%g5, 1, %%g7
106 cas [%2], %%g5, %%g7
107 cmp %%g5, %%g7
108 bne,pn %%icc, 1b
109 cmp %%g7, 1
110 bl,pn %%icc, 3f
111 membar #StoreStore
113 .subsection 2
114 3: mov %2, %%g5
115 save %%sp, -160, %%sp
116 mov %%g1, %%l1
117 mov %%g2, %%l2
118 mov %%g3, %%l3
119 call %3
120 mov %%g5, %%o0
121 mov %%l1, %%g1
122 mov %%l2, %%g2
123 mov %%l3, %%g3
124 ba,pt %%xcc, 2b
125 restore %%o0, %%g0, %0
126 .previous\n"
127 : "=r" (ret)
128 : "0" (ret), "r" (__atomic_fool_gcc(sem)), "i" (__down_interruptible)
129 : "g5", "g7", "memory", "cc");
130 return ret;
133 extern inline int down_trylock(struct semaphore *sem)
135 int ret = 0;
136 #if WAITQUEUE_DEBUG
137 CHECK_MAGIC(sem->__magic);
138 #endif
139 __asm__ __volatile__("
140 1: lduw [%2], %%g5
141 sub %%g5, 1, %%g7
142 cas [%2], %%g5, %%g7
143 cmp %%g5, %%g7
144 bne,pn %%icc, 1b
145 cmp %%g7, 1
146 bl,pn %%icc, 3f
147 membar #StoreStore
149 .subsection 2
150 3: mov %2, %%g5
151 save %%sp, -160, %%sp
152 mov %%g1, %%l1
153 mov %%g2, %%l2
154 mov %%g3, %%l3
155 call %3
156 mov %%g5, %%o0
157 mov %%l1, %%g1
158 mov %%l2, %%g2
159 mov %%l3, %%g3
160 ba,pt %%xcc, 2b
161 restore %%o0, %%g0, %0
162 .previous\n"
163 : "=r" (ret)
164 : "0" (ret), "r" (__atomic_fool_gcc(sem)), "i" (__down_trylock)
165 : "g5", "g7", "memory", "cc");
166 return ret;
169 extern __inline__ void up(struct semaphore * sem)
171 #if WAITQUEUE_DEBUG
172 CHECK_MAGIC(sem->__magic);
173 #endif
174 __asm__ __volatile__("
175 membar #StoreLoad | #LoadLoad
176 1: lduw [%0], %%g5
177 add %%g5, 1, %%g7
178 cas [%0], %%g5, %%g7
179 cmp %%g5, %%g7
180 bne,pn %%icc, 1b
181 addcc %%g7, 1, %%g0
182 ble,pn %%icc, 3f
185 .subsection 2
186 3: mov %0, %%g5
187 save %%sp, -160, %%sp
188 mov %%g1, %%l1
189 mov %%g2, %%l2
190 mov %%g3, %%l3
191 call %1
192 mov %%g5, %%o0
193 mov %%l1, %%g1
194 mov %%l2, %%g2
195 ba,pt %%xcc, 2b
196 restore %%l3, %%g0, %%g3
197 .previous\n"
198 : : "r" (__atomic_fool_gcc(sem)), "i" (__up)
199 : "g5", "g7", "memory", "cc");
202 #endif /* __KERNEL__ */
204 #endif /* !(_SPARC64_SEMAPHORE_H) */