 * i386 SMP lock implementation
6 #include <linux/interrupt.h>
7 #include <asm/spinlock.h>
9 extern spinlock_t kernel_flag
;
/*
 * Lock-consistency debugging hook.
 * NOTE(review): this excerpt shows both an extern declaration and a
 * no-op macro for __check_locks (the embedded numbering jumps 12 -> 14),
 * which would conflict if compiled together as-is — presumably the full
 * file selects one of the two via an #ifdef (e.g. a debug or SMP config
 * option) on lines not visible here. Confirm against the complete file.
 */
12 extern void __check_locks(unsigned int);
14 #define __check_locks(x) do { } while (0)
 * Release global kernel lock and global interrupt lock
/*
 * release_kernel_lock(task, cpu):
 * If "task" currently holds the big kernel lock (lock_depth >= 0),
 * release the kernel_flag spinlock via the spin_unlock_string asm
 * template (kernel_flag passed as an "=m" output through
 * __dummy_lock), then release the global interrupt lock for "cpu"
 * via release_irqlock().
 * NOTE(review): the embedded numbering here (20, 22..25) skips
 * lines 21 and 26+ — the customary do { ... } while (0) wrapper
 * around this multi-statement macro (and any trailing statements)
 * is not visible in this excerpt; confirm against the full file
 * before editing.
 */
20 #define release_kernel_lock(task, cpu) \
22 if (task->lock_depth >= 0) \
23 __asm__ __volatile__(spin_unlock_string \
24 :"=m" (__dummy_lock(&kernel_flag))); \
25 release_irqlock(cpu); \
 * Re-acquire the kernel lock
/*
 * reacquire_kernel_lock(task):
 * If "task" logically holds the big kernel lock (lock_depth >= 0),
 * re-take the kernel_flag spinlock via the spin_lock_string asm
 * template — the counterpart of release_kernel_lock() above,
 * presumably used after the lock was dropped around a reschedule.
 * NOTE(review): the embedded numbering (32, 34..36) skips lines
 * 33 and 37 — the do { ... } while (0) wrapper is on lines not
 * shown in this excerpt; confirm against the full file.
 */
32 #define reacquire_kernel_lock(task) \
34 if (task->lock_depth >= 0) \
35 __asm__ __volatile__(spin_lock_string \
36 :"=m" (__dummy_lock(&kernel_flag))); \
 * Getting the big kernel lock.
 * This cannot happen asynchronously,
 * so we only need to worry about other
/*
 * lock_kernel(): acquire the big kernel lock for the current task.
 * The visible output constraints show it writes both the kernel_flag
 * lock word (via __dummy_lock) and current->lock_depth — presumably
 * maintaining a per-task recursion count so nested acquisitions are
 * safe; verify against the full file.
 * NOTE(review): the asm instruction template is MISSING from this
 * excerpt (the embedded numbering jumps 47 -> 55): everything between
 * __asm__ __volatile__( and the output-constraint list is not visible
 * here. Do not edit or reformat this function without the full file.
 */
47 extern __inline__
void lock_kernel(void)
55 :"=m" (__dummy_lock(&kernel_flag
)),
56 "=m" (current
->lock_depth
));
/*
 * unlock_kernel(): release the big kernel lock for the current task.
 * Mirror of lock_kernel() above — the visible output constraints show
 * it writes the kernel_flag lock word (via __dummy_lock) and
 * current->lock_depth, presumably decrementing the recursion count
 * and only dropping the spinlock on the outermost release; verify
 * against the full file.
 * NOTE(review): the asm instruction template is MISSING from this
 * excerpt (the embedded numbering jumps 59 -> 66): everything between
 * __asm__ __volatile__( and the output-constraint list is not visible
 * here. Do not edit or reformat this function without the full file.
 */
59 extern __inline__
void unlock_kernel(void)
66 :"=m" (__dummy_lock(&kernel_flag
)),
67 "=m" (current
->lock_depth
));