/*
 * <asm/smplock.h>
 *
 * i386 SMP lock implementation
 */
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/sched.h>
#include <asm/current.h>
11 extern spinlock_t kernel_flag
;
14 #define kernel_locked() spin_is_locked(&kernel_flag)
17 #define kernel_locked() preempt_count()
19 #define kernel_locked() 1
/*
 * Release global kernel lock and global interrupt lock
 *
 * Drops the BKL spinlock if @task holds it (lock_depth >= 0) without
 * touching task->lock_depth, so reacquire_kernel_lock() can retake it.
 * Wrapped in do { } while (0): a bare `if` in a macro mis-binds in
 * `if (...) release_kernel_lock(t); else ...` (dangling-else hazard).
 */
#define release_kernel_lock(task)			\
do {							\
	if (unlikely((task)->lock_depth >= 0))		\
		spin_unlock(&kernel_flag);		\
} while (0)
/*
 * Re-acquire the kernel lock
 *
 * Counterpart of release_kernel_lock(): retakes the BKL spinlock when
 * @task logically holds it (lock_depth >= 0). do { } while (0) guards
 * against the dangling-else hazard of a bare `if` inside a macro.
 */
#define reacquire_kernel_lock(task)			\
do {							\
	if (unlikely((task)->lock_depth >= 0))		\
		spin_lock(&kernel_flag);		\
} while (0)
43 * Getting the big kernel lock.
45 * This cannot happen asynchronously,
46 * so we only need to worry about other
49 static __inline__
void lock_kernel(void)
52 if (current
->lock_depth
== -1)
53 spin_lock(&kernel_flag
);
54 ++current
->lock_depth
;
57 if (!++current
->lock_depth
)
58 spin_lock(&kernel_flag
);
65 :"=m" (__dummy_lock(&kernel_flag
)),
66 "=m" (current
->lock_depth
));
71 static __inline__
void unlock_kernel(void)
73 if (current
->lock_depth
< 0)
76 if (--current
->lock_depth
< 0)
77 spin_unlock(&kernel_flag
);
84 :"=m" (__dummy_lock(&kernel_flag
)),
85 "=m" (current
->lock_depth
));