/*
 * Copyright (C) 1996 Paul Mackerras.
 */
#include <linux/kernel.h>
#include <asm/bitops.h>
/*
 * If the bitops are not inlined in bitops.h, they are defined here.
 */
#if !__INLINE_BITOPS
13 void set_bit(int nr
, volatile void * addr
)
16 unsigned long mask
= 1 << (nr
& 0x1f);
17 unsigned long *p
= ((unsigned long *)addr
) + (nr
>> 5);
19 __asm__
__volatile__(SMP_WMB
"\
25 : "=&r" (old
), "=m" (*p
)
26 : "r" (mask
), "r" (p
), "m" (*p
)
30 void clear_bit(int nr
, volatile void *addr
)
33 unsigned long mask
= 1 << (nr
& 0x1f);
34 unsigned long *p
= ((unsigned long *)addr
) + (nr
>> 5);
36 __asm__
__volatile__(SMP_WMB
"\
42 : "=&r" (old
), "=m" (*p
)
43 : "r" (mask
), "r" (p
), "m" (*p
)
47 void change_bit(int nr
, volatile void *addr
)
50 unsigned long mask
= 1 << (nr
& 0x1f);
51 unsigned long *p
= ((unsigned long *)addr
) + (nr
>> 5);
53 __asm__
__volatile__(SMP_WMB
"\
59 : "=&r" (old
), "=m" (*p
)
60 : "r" (mask
), "r" (p
), "m" (*p
)
64 int test_and_set_bit(int nr
, volatile void *addr
)
67 unsigned int mask
= 1 << (nr
& 0x1f);
68 volatile unsigned int *p
= ((volatile unsigned int *)addr
) + (nr
>> 5);
70 __asm__
__volatile__(SMP_WMB
"\
76 : "=&r" (old
), "=&r" (t
), "=m" (*p
)
77 : "r" (mask
), "r" (p
), "m" (*p
)
80 return (old
& mask
) != 0;
83 int test_and_clear_bit(int nr
, volatile void *addr
)
86 unsigned int mask
= 1 << (nr
& 0x1f);
87 volatile unsigned int *p
= ((volatile unsigned int *)addr
) + (nr
>> 5);
89 __asm__
__volatile__(SMP_WMB
"\
95 : "=&r" (old
), "=&r" (t
), "=m" (*p
)
96 : "r" (mask
), "r" (p
), "m" (*p
)
99 return (old
& mask
) != 0;
102 int test_and_change_bit(int nr
, volatile void *addr
)
105 unsigned int mask
= 1 << (nr
& 0x1f);
106 volatile unsigned int *p
= ((volatile unsigned int *)addr
) + (nr
>> 5);
108 __asm__
__volatile__(SMP_WMB
"\
114 : "=&r" (old
), "=&r" (t
), "=m" (*p
)
115 : "r" (mask
), "r" (p
), "m" (*p
)
118 return (old
& mask
) != 0;
#endif /* !__INLINE_BITOPS */