6 #include <linux/config.h>
7 #include <linux/futex.h>
9 #include <asm/uaccess.h>
/*
 * Barrier fragment appended to the ll/sc sequence in __futex_atomic_op
 * below.  The first definition (a MIPS "sync") orders the atomic update
 * on SMP; the second expands to nothing for UP builds.
 * NOTE(review): the CONFIG_SMP #ifdef/#else/#endif that selects between
 * these two definitions is not visible in this excerpt -- confirm
 * against the complete header.
 */
12 #define __FUTEX_SMP_SYNC " sync \n"
14 #define __FUTEX_SMP_SYNC
/*
 * __futex_atomic_op(insn, ret, oldval, uaddr, oparg)
 *
 * Atomically read-modify-write the user-space word at `uaddr` using a
 * MIPS ll/sc sequence:
 *   1:  ll %1, (%3)   -- load-linked the old value into `oldval` (%1)
 *       insn          -- caller-supplied template computing the new
 *                        value into $1 from %1/oparg (%z4)
 *   2:  sc $1, (%3)   -- store-conditional it back (line not visible
 *                        here, but label 2 is referenced by __ex_table)
 *
 * `ret` is tied to operand %0 and initialised to 0 by the "0" (0) input
 * constraint.  The __ex_table entries route a fault at the ll (1b) or
 * sc (2b) to a .fixup stub at label 4, which loads -EFAULT (%5, the "i"
 * immediate) into `ret`.
 *
 * NOTE(review): several body lines of this macro are missing from this
 * excerpt (the `insn` expansion, the sc/branch/__FUTEX_SMP_SYNC
 * sequence, and the fixup stub between labels 3 and 4).  The comments
 * above describe only what the visible lines establish plus the
 * standard ll/sc shape -- confirm against the complete header.
 */
17 #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
19 __asm__ __volatile__( \
23 "1: ll %1, (%3) # __futex_atomic_op1 \n" \
33 " .section .fixup,\"ax\" \n" \
37 " .section __ex_table,\"a\" \n" \
38 " "__UA_ADDR "\t1b, 4b \n" \
39 " "__UA_ADDR "\t2b, 4b \n" \
41 : "=r" (ret), "=r" (oldval) \
42 : "0" (0), "r" (uaddr), "Jr" (oparg), "i" (-EFAULT)); \
/*
 * futex_atomic_op_inuser - perform the encoded FUTEX_WAKE_OP atomic
 * operation on the user word *uaddr, then evaluate the encoded
 * comparison against the value the word held before the operation.
 *
 * NOTE(review): this excerpt is heavily gutted -- the storage-class /
 * return-type line, the local declarations of `oldval` and `ret`, the
 * `oparg = 1 << oparg;` shift, the -EFAULT return, the preempt-count
 * bracketing, the two switch statements' scaffolding (switch/case/
 * break/braces for the op dispatch, `if (!ret)` + switch for the cmp
 * dispatch) and the final return are all absent between the visible
 * fragments.  Comments below annotate only what is present; confirm
 * everything else against the complete header.
 */
46 futex_atomic_op_inuser (int encoded_op
, int __user
*uaddr
)
/*
 * Decode the packed operation word.  The (<< k) >> m pairs sign-extend
 * the extracted field via arithmetic right shift of a signed int
 * (implementation-defined in ISO C; relied upon by kernel-supported
 * compilers).
 */
48 int op          /* bits 28-30: operation selector */
= (encoded_op
>> 28) & 7;
49 int cmp         /* bits 24-27: comparison selector */
= (encoded_op
>> 24) & 15;
50 int oparg       /* bits 12-23, sign-extended: operand for the op */
= (encoded_op
<< 8) >> 20;
51 int cmparg      /* bits 0-11, sign-extended: operand for the compare */
= (encoded_op
<< 20) >> 20;
/*
 * FUTEX_OP_OPARG_SHIFT flag in the op field: oparg is a shift count
 * rather than a literal value (the `oparg = 1 << oparg;` consequent is
 * not visible in this excerpt).
 */
53 if (encoded_op
& (FUTEX_OP_OPARG_SHIFT
<< 28))
/*
 * The user word must be writable from this context (the -EFAULT return
 * taken on failure is not visible here).
 */
56 if (! access_ok (VERIFY_WRITE
, uaddr
, sizeof(int)))
/*
 * Op dispatch (switch/case/break scaffolding missing).  Each arm hands
 * an insn template to __futex_atomic_op: $1 receives the new value,
 * %1 is the old value loaded by ll, %z4 is oparg.
 */
/* set: new value is oparg itself */
63 __futex_atomic_op("move $1, %z4", ret
, oldval
, uaddr
, oparg
);
/* add */
67 __futex_atomic_op("addu $1, %1, %z4",
68 ret
, oldval
, uaddr
, oparg
);
/* or */
71 __futex_atomic_op("or $1, %1, %z4",
72 ret
, oldval
, uaddr
, oparg
);
/* and-not: the operand is complemented here (~oparg), so a plain
 * `and` implements FUTEX_OP_ANDN */
75 __futex_atomic_op("and $1, %1, %z4",
76 ret
, oldval
, uaddr
, ~oparg
);
/* xor */
79 __futex_atomic_op("xor $1, %1, %z4",
80 ret
, oldval
, uaddr
, oparg
);
/*
 * Comparison dispatch: ret becomes the boolean result of comparing the
 * pre-operation value against cmparg (the enclosing switch and any
 * success guard around it are not visible in this excerpt).
 */
90 case FUTEX_OP_CMP_EQ
: ret
= (oldval
== cmparg
); break;
91 case FUTEX_OP_CMP_NE
: ret
= (oldval
!= cmparg
); break;
92 case FUTEX_OP_CMP_LT
: ret
= (oldval
< cmparg
); break;
93 case FUTEX_OP_CMP_GE
: ret
= (oldval
>= cmparg
); break;
94 case FUTEX_OP_CMP_LE
: ret
= (oldval
<= cmparg
); break;
95 case FUTEX_OP_CMP_GT
: ret
= (oldval
> cmparg
); break;
/* unknown comparison selector */
96 default: ret
= -ENOSYS
;
103 futex_atomic_cmpxchg_inatomic(int __user
*uaddr
, int oldval
, int newval
)