x86: replace LOCK_PREFIX in futex.h
[linux-2.6/kvm.git] / include / asm-x86 / futex.h
blob 9d919264923a0b12db0bd804abed465c5ce82c55
#ifndef _ASM_X86_FUTEX_H
#define _ASM_X86_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>

#include <asm/asm.h>
#include <asm/errno.h>
#include <asm/processor.h>
#include <asm/system.h>
#include <asm/uaccess.h>

/*
 * Run one atomic instruction on the user-space int at uaddr.  A fault
 * on the access at label 1 is fixed up at label 3, which stores
 * -EFAULT in ret and resumes at label 2.
 */
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg)	\
  __asm__ __volatile(						\
"1:	" insn "\n"						\
"2:	.section .fixup,\"ax\"\n				\
3:	mov	%3, %1\n					\
	jmp	2b\n						\
	.previous\n						\
	.section __ex_table,\"a\"\n				\
	.align	8\n"						\
	_ASM_PTR "1b,3b\n					\
	.previous"						\
	: "=r" (oldval), "=r" (ret), "+m" (*uaddr)		\
	: "i" (-EFAULT), "0" (oparg), "1" (0))

/*
 * Load-op-store on the user-space int at uaddr, built as a cmpxchg
 * retry loop.  A fault at the load (label 1) or at the cmpxchg
 * (label 2) is fixed up at label 4, which stores -EFAULT in ret.
 */
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)	\
  __asm__ __volatile(						\
"1:	movl	%2, %0\n					\
	movl	%0, %3\n"					\
	insn "\n"						\
"2:	lock; cmpxchgl %3, %2\n					\
	jnz	1b\n						\
3:	.section .fixup,\"ax\"\n				\
4:	mov	%5, %1\n					\
	jmp	3b\n						\
	.previous\n						\
	.section __ex_table,\"a\"\n				\
	.align	8\n"						\
	_ASM_PTR "1b,4b,2b,4b\n					\
	.previous"						\
	: "=&a" (oldval), "=&r" (ret), "+m" (*uaddr),		\
	  "=&r" (tem)						\
	: "r" (oparg), "i" (-EFAULT), "1" (0))

static inline int
futex_atomic_op_inuser(int encoded_op, int __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret, tem;
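
	/*
	 * encoded_op is packed by FUTEX_OP() in <linux/futex.h>:
	 * bits 28-31 = op, 24-27 = cmp, 12-23 = oparg, 0-11 = cmparg
	 * (the shift pairs above sign-extend the two 12-bit fields).
	 * For example, FUTEX_OP(FUTEX_OP_ADD, 1, FUTEX_OP_CMP_GT, 0)
	 * asks "atomically add 1, report whether the old value was > 0".
	 */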

	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

#if defined(CONFIG_X86_32) && !defined(CONFIG_X86_BSWAP)
	/* Real i386 machines can only support FUTEX_OP_SET */
	if (op != FUTEX_OP_SET && boot_cpu_data.x86 == 3)
		return -ENOSYS;
#endif

	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op1("lock; xaddl %0, %2", ret, oldval,
				   uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op2("orl %4, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op2("andl %4, %3", ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
		default: ret = -ENOSYS;
		}
	}

	return ret;
}
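
/*
 * Illustrative caller (a sketch, not from this file): the wake-op path
 * in kernel/futex.c uses the encoded op roughly like this, waking the
 * second futex only when the comparison on the old value held.
 * wake_up_waiters() is a hypothetical helper name, used here only to
 * stand in for the real wakeup logic:
 *
 *	ret = futex_atomic_op_inuser(op, uaddr2);
 *	if (ret < 0)
 *		return ret;			// -EFAULT or -ENOSYS
 *	if (ret)
 *		wake_up_waiters(uaddr2);	// hypothetical helper
 */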

static inline int
futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
{
	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

	__asm__ __volatile__(

		"1:	lock; cmpxchgl %3, %1			\n"
		"2:	.section .fixup, \"ax\"			\n"
		"3:	mov     %2, %0				\n"
		"	jmp     2b				\n"
		"	.previous				\n"

		"	.section __ex_table, \"a\"		\n"
		"	.align  8				\n"
			_ASM_PTR " 1b,3b			\n"
		"	.previous				\n"

		: "=a" (oldval), "+m" (*uaddr)
		: "i" (-EFAULT), "r" (newval), "0" (oldval)
		: "memory"
	);

	return oldval;
}
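
/*
 * Illustrative use (a sketch, not from this file): callers compare the
 * returned current value against the expected one to see whether the
 * swap took place.  Note the wart in this return convention: a word
 * that legitimately contains -EFAULT is indistinguishable from a fault.
 *
 *	curval = futex_atomic_cmpxchg_inatomic(uaddr, uval, newval);
 *	if (curval == -EFAULT)
 *		;	// fault while accessing *uaddr
 *	else if (curval == uval)
 *		;	// newval was stored atomically
 */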

#endif
#endif