/*
 * i386 SMP lock implementation
 */
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/sched.h>
#include <asm/current.h>
11 extern spinlock_t kernel_flag
;
13 #define kernel_locked() spin_is_locked(&kernel_flag)
/*
 * Release global kernel lock and global interrupt lock.
 *
 * A non-negative lock_depth means @task holds the big kernel lock,
 * so drop the spinlock before releasing the global irq lock.
 * Wrapped in do { } while (0): the body is multi-statement, and the
 * bare form would break when used as the branch of an if/else.
 */
#define release_kernel_lock(task, cpu) \
do { \
	if (task->lock_depth >= 0) \
		spin_unlock(&kernel_flag); \
	release_irqlock(cpu); \
} while (0)
/*
 * Re-acquire the kernel lock if @task held it before it was released
 * (lock_depth >= 0 marks a holder).  Wrapped in do { } while (0) so a
 * bare `if` inside the expansion cannot capture a caller's `else`.
 */
#define reacquire_kernel_lock(task) \
do { \
	if (task->lock_depth >= 0) \
		spin_lock(&kernel_flag); \
} while (0)
37 * Getting the big kernel lock.
39 * This cannot happen asynchronously,
40 * so we only need to worry about other
43 extern __inline__
void lock_kernel(void)
46 if (!++current
->lock_depth
)
47 spin_lock(&kernel_flag
);
54 :"=m" (__dummy_lock(&kernel_flag
)),
55 "=m" (current
->lock_depth
));
59 extern __inline__
void unlock_kernel(void)
61 if (current
->lock_depth
< 0)
64 if (--current
->lock_depth
< 0)
65 spin_unlock(&kernel_flag
);
72 :"=m" (__dummy_lock(&kernel_flag
)),
73 "=m" (current
->lock_depth
));