1 #ifndef _SPARC_SEMAPHORE_HELPER_H
2 #define _SPARC_SEMAPHORE_HELPER_H
/*
 * (barely) SMP- and interrupt-safe semaphore helper functions, sparc version.
 *
 * (C) Copyright 1999 David S. Miller (davem@redhat.com)
 * (C) Copyright 1999 Jakub Jelinek (jj@ultra.linux.cz)
 */
/* Register one more pending wakeup: atomically bump sem->waking. */
10 #define wake_one_more(sem) atomic_inc(&(sem)->waking)
/*
 * Try to atomically consume one "waking" credit for this sleeper.
 *
 * NOTE(review): the extraction that produced this file dropped the
 * function braces, the local declarations (ret/tmp), the CONFIG_SMP
 * preprocessor conditionals and most of the inline-asm template text —
 * only fragments and the operand/clobber lists survive.  Restore the
 * body from the pristine include/asm-sparc/semaphore-helper.h before
 * relying on (or attempting to compile) this code.
 */
11 static __inline__
int waking_non_zero(struct semaphore
*sem
)
/*
 * First asm (presumably the SMP variant — confirm against the original):
 * "ldstub [%2 + 3], %0" atomically test-and-sets the low-order guard
 * byte of sem->waking (%2 is &sem->waking).  The PSR_PIL immediate and
 * the %g1 clobber suggest interrupts are masked via the PSR's PIL field
 * around the update — TODO confirm; most of the template is missing.
 */
18 __asm__
__volatile__("
23 1: ldstub [%2 + 3], %0
36 : "=&r" (ret
), "=&r" (tmp
)
37 : "r" (&sem
->waking
), "i" (PSR_PIL
)
38 : "g1", "memory", "cc");
/*
 * Second asm (presumably the uniprocessor #else branch — confirm):
 * "wr %%g1, 0x0, %%psr" writes the processor state register from the
 * value saved in %g1, i.e. restores interrupt state after the update.
 */
40 __asm__
__volatile__("
51 1: wr %%g1, 0x0, %%psr
54 : "r" (&sem
->waking
), "i" (PSR_PIL
)
55 : "g1", "memory", "cc");
/* (the "return ret;" and closing brace were lost in extraction) */
/*
 * Interruptible variant: try to consume a "waking" credit, but abandon
 * the wait when a signal is pending for @tsk — the visible tail
 * re-increments sem->count, returning the slot this sleeper had
 * claimed.  In the original header that path returns -EINTR, a consumed
 * credit returns 1 and "still waiting" returns 0 — TODO confirm against
 * the pristine file.
 *
 * NOTE(review): extraction dropped the braces, local declarations
 * (ret/tmp), CONFIG_SMP conditionals, most asm template text and the
 * return statements; recover the full body from the original
 * include/asm-sparc/semaphore-helper.h before use.
 */
60 static __inline__
int waking_non_zero_interruptible(struct semaphore
*sem
,
61 struct task_struct
*tsk
)
/* First asm: atomic ldstub on byte 3 of sem->waking (the counter's
 * guard byte); PSR_PIL immediate and %g1 clobber suggest interrupt
 * masking via the PSR — TODO confirm, template text is missing. */
68 __asm__
__volatile__("
73 1: ldstub [%2 + 3], %0
86 : "=&r" (ret
), "=&r" (tmp
)
87 : "r" (&sem
->waking
), "i" (PSR_PIL
)
88 : "g1", "memory", "cc");
/* Second asm: restores the PSR saved in %g1 (presumably the UP
 * #else branch — confirm). */
90 __asm__
__volatile__("
101 1: wr %%g1, 0x0, %%psr
104 : "r" (&sem
->waking
), "i" (PSR_PIL
)
105 : "g1", "memory", "cc");
/* No credit consumed and a signal is queued: give our count back and
 * bail out (original code sets ret = -EINTR here — verify). */
107 if(ret
== 0 && signal_pending(tsk
)) {
108 atomic_inc(&sem
->count
);
/*
 * Trylock variant: attempt to consume a "waking" credit without
 * sleeping.  The visible tail re-increments sem->count on the failure
 * path (the original returns 1 there and 0 on success) — TODO confirm
 * against the pristine header.
 *
 * NOTE(review): extraction dropped the braces, local declarations
 * (ret/tmp), CONFIG_SMP conditionals, most asm template text and the
 * return statements; recover the full body from the original
 * include/asm-sparc/semaphore-helper.h before use.
 */
114 static __inline__
int waking_non_zero_trylock(struct semaphore
*sem
)
/* First asm: atomic ldstub on byte 3 of sem->waking (guard byte),
 * apparently with interrupts masked via PSR_PIL and %g1 as scratch —
 * TODO confirm, template text is missing. */
121 __asm__
__volatile__("
126 1: ldstub [%2 + 3], %0
139 : "=&r" (ret
), "=&r" (tmp
)
140 : "r" (&sem
->waking
), "i" (PSR_PIL
)
141 : "g1", "memory", "cc");
/* Second asm: restore the saved PSR from %g1 (presumably the UP
 * #else branch — confirm). */
143 __asm__
__volatile__("
154 1: wr %%g1, 0x0, %%psr
157 : "r" (&sem
->waking
), "i" (PSR_PIL
)
158 : "g1", "memory", "cc");
/* Failure path: hand the claimed count back to the semaphore. */
162 atomic_inc(&sem
->count
);
166 #endif /* !(_SPARC_SEMAPHORE_HELPER_H) */