Move the callout_reset into the critical section.
[dragonfly/netmp.git] / sys / i386 / include / lock.h
blobc98ab02e9b697486f5d0b5bb78d15906ed32767d
1 /*
2 * Copyright (c) 2003,2004 The DragonFly Project. All rights reserved.
3 *
4 * This code is derived from software contributed to The DragonFly Project
5 * by Matthew Dillon <dillon@backplane.com>
6 *
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
9 * are met:
11 * 1. Redistributions of source code must retain the above copyright
12 * notice, this list of conditions and the following disclaimer.
13 * 2. Redistributions in binary form must reproduce the above copyright
14 * notice, this list of conditions and the following disclaimer in
15 * the documentation and/or other materials provided with the
16 * distribution.
17 * 3. Neither the name of The DragonFly Project nor the names of its
18 * contributors may be used to endorse or promote products derived
19 * from this software without specific, prior written permission.
21 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
24 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
25 * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
26 * INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING,
27 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
28 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
29 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
30 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
31 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: src/sys/i386/include/lock.h,v 1.11.2.2 2000/09/30 02:49:34 ps Exp $
 * $DragonFly: src/sys/i386/include/Attic/lock.h,v 1.10 2004/11/20 20:50:36 dillon Exp $
 */
38 #ifndef _MACHINE_LOCK_H_
39 #define _MACHINE_LOCK_H_
41 #ifndef _MACHINE_PSL_H_
42 #include "psl.h"
43 #endif
46 * MP_FREE_LOCK is used by both assembly and C under SMP.
48 #ifdef SMP
49 #define MP_FREE_LOCK 0xffffffff /* value of lock when free */
50 #endif
52 #ifdef LOCORE
55 * Spinlock assembly support. Note: eax and ecx can be tromped. No
56 * other register will be. Note that these routines are sometimes
57 * called with (%edx) as the mem argument.
59 * Under UP the spinlock routines still serve to disable/restore
60 * interrupts.
64 #ifdef SMP
66 #define SPIN_INIT(mem) \
67 movl $0,mem ; \
69 #define SPIN_INIT_NOREG(mem) \
70 SPIN_INIT(mem) ; \
72 #define SPIN_LOCK(mem) \
73 pushfl ; \
74 popl %ecx ; /* flags */ \
75 cli ; \
76 orl $PSL_C,%ecx ; /* make sure non-zero */ \
77 7: ; \
78 movl $0,%eax ; /* expected contents of lock */ \
79 lock cmpxchgl %ecx,mem ; /* Z=1 (jz) on success */ \
80 pause ; \
81 jnz 7b ; \
83 #define SPIN_LOCK_PUSH_REGS \
84 subl $8,%esp ; \
85 movl %ecx,(%esp) ; \
86 movl %eax,4(%esp) ; \
88 #define SPIN_LOCK_POP_REGS \
89 movl (%esp),%ecx ; \
90 movl 4(%esp),%eax ; \
91 addl $8,%esp ; \
93 #define SPIN_LOCK_FRAME_SIZE 8
95 #define SPIN_LOCK_NOREG(mem) \
96 SPIN_LOCK_PUSH_REGS ; \
97 SPIN_LOCK(mem) ; \
98 SPIN_LOCK_POP_REGS ; \
100 #define SPIN_UNLOCK(mem) \
101 pushl mem ; \
102 movl $0,mem ; \
103 popfl ; \
105 #define SPIN_UNLOCK_PUSH_REGS
106 #define SPIN_UNLOCK_POP_REGS
107 #define SPIN_UNLOCK_FRAME_SIZE 0
109 #define SPIN_UNLOCK_NOREG(mem) \
110 SPIN_UNLOCK(mem) ; \
112 #else
114 #define SPIN_LOCK(mem) \
115 pushfl ; \
116 cli ; \
117 orl $PSL_C,(%esp) ; \
118 popl mem ; \
120 #define SPIN_LOCK_PUSH_RESG
121 #define SPIN_LOCK_POP_REGS
122 #define SPIN_LOCK_FRAME_SIZE 0
124 #define SPIN_UNLOCK(mem) \
125 pushl mem ; \
126 movl $0,mem ; \
127 popfl ; \
129 #define SPIN_UNLOCK_PUSH_REGS
130 #define SPIN_UNLOCK_POP_REGS
131 #define SPIN_UNLOCK_FRAME_SIZE 0
133 #endif /* SMP */
135 #else /* !LOCORE */
137 #ifdef _KERNEL
140 * Spinlock functions (UP and SMP). Under UP a spinlock still serves
141 * to disable/restore interrupts even if it doesn't spin.
143 struct spinlock {
144 volatile int opaque;
147 typedef struct spinlock *spinlock_t;
149 void mpintr_lock(void); /* disables int / spinlock combo */
150 void mpintr_unlock(void);
151 void com_lock(void); /* disables int / spinlock combo */
152 void com_unlock(void);
153 void imen_lock(void); /* disables int / spinlock combo */
154 void imen_unlock(void);
155 void clock_lock(void); /* disables int / spinlock combo */
156 void clock_unlock(void);
157 void cons_lock(void); /* disables int / spinlock combo */
158 void cons_unlock(void);
160 extern struct spinlock smp_rv_spinlock;
162 void spin_lock(spinlock_t lock);
163 void spin_lock_np(spinlock_t lock);
164 void spin_unlock(spinlock_t lock);
165 void spin_unlock_np(spinlock_t lock);
166 #if 0
167 void spin_lock_init(spinlock_t lock);
168 #endif
171 * Inline version of spinlock routines -- overrides assembly. Only unlock
172 * and init here please.
174 static __inline void
175 spin_lock_init(spinlock_t lock)
177 lock->opaque = 0;
180 #endif /* _KERNEL */
182 #if defined(_KERNEL) || defined(_UTHREAD)
185 * MP LOCK functions for SMP and UP. Under UP the MP lock does not exist
186 * but we leave a few functions intact as macros for convenience.
188 #ifdef SMP
190 void get_mplock(void);
191 int try_mplock(void);
192 void rel_mplock(void);
193 int cpu_try_mplock(void);
194 void cpu_get_initial_mplock(void);
196 extern u_int mp_lock;
198 #define MP_LOCK_HELD() (mp_lock == mycpu->gd_cpuid)
199 #define ASSERT_MP_LOCK_HELD() KKASSERT(MP_LOCK_HELD())
201 static __inline void
202 cpu_rel_mplock(void)
204 mp_lock = MP_FREE_LOCK;
207 #else
209 #define get_mplock()
210 #define try_mplock() 1
211 #define rel_mplock()
212 #define ASSERT_MP_LOCK_HELD()
214 #endif /* SMP */
215 #endif /* _KERNEL || _UTHREAD */
216 #endif /* LOCORE */
217 #endif /* !_MACHINE_LOCK_H_ */