arch/ppc/kernel/bitops.c  (davej-history.git)
/*
 * Copyright (C) 1996 Paul Mackerras.
 */

#include <linux/kernel.h>
#include <asm/bitops.h>

/*
 * If the bitops are not inlined in bitops.h, they are defined here.
 *  -- paulus
 */
#if !__INLINE_BITOPS
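/*
 * SMP_WMB and SMP_MB are supplied by <asm/bitops.h>.  As a hedged
 * sketch (from memory of 2.4-era asm-ppc headers, not verified against
 * this tree), they expand roughly as below, and to nothing on UP builds:
 */
#if 0	/* illustrative only -- the real definitions live in the header */
#ifdef CONFIG_SMP
#define SMP_WMB	"eieio\n"
#define SMP_MB	"\nsync"
#else
#define SMP_WMB
#define SMP_MB
#endif
#endif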
/*
 * Atomically set bit nr in the bitmap at addr.
 */
void set_bit(int nr, volatile void *addr)
{
	unsigned long old;
	unsigned long mask = 1 << (nr & 0x1f);
	unsigned long *p = ((unsigned long *)addr) + (nr >> 5);

	__asm__ __volatile__(SMP_WMB "\
1:	lwarx	%0,0,%3
	or	%0,%0,%2
	stwcx.	%0,0,%3
	bne	1b"
	SMP_MB
	: "=&r" (old), "=m" (*p)
	: "r" (mask), "r" (p), "m" (*p)
	: "cc");
}
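/*
 * The loop above is the classic PowerPC load-reserve/store-conditional
 * pattern: lwarx loads the word and places a reservation on it, and
 * stwcx. stores only if the reservation still holds, setting CR0 so
 * that bne can retry on failure.  As a hedged, portable sketch of the
 * same operation (illustrative only -- __atomic builtins are a modern
 * compiler feature, not something this file uses):
 */
static void set_bit_sketch(int nr, volatile void *addr)
{
	volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);
	unsigned int mask = 1u << (nr & 0x1f);

	/* fetch-or retries internally on LL/SC machines, mirroring
	 * the lwarx/stwcx. loop in set_bit() above */
	__atomic_fetch_or(p, mask, __ATOMIC_SEQ_CST);
}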
/*
 * Atomically clear bit nr in the bitmap at addr (andc masks the bit off).
 */
void clear_bit(int nr, volatile void *addr)
{
	unsigned long old;
	unsigned long mask = 1 << (nr & 0x1f);
	unsigned long *p = ((unsigned long *)addr) + (nr >> 5);

	__asm__ __volatile__(SMP_WMB "\
1:	lwarx	%0,0,%3
	andc	%0,%0,%2
	stwcx.	%0,0,%3
	bne	1b"
	SMP_MB
	: "=&r" (old), "=m" (*p)
	: "r" (mask), "r" (p), "m" (*p)
	: "cc");
}
/*
 * Atomically toggle bit nr in the bitmap at addr.
 */
void change_bit(int nr, volatile void *addr)
{
	unsigned long old;
	unsigned long mask = 1 << (nr & 0x1f);
	unsigned long *p = ((unsigned long *)addr) + (nr >> 5);

	__asm__ __volatile__(SMP_WMB "\
1:	lwarx	%0,0,%3
	xor	%0,%0,%2
	stwcx.	%0,0,%3
	bne	1b"
	SMP_MB
	: "=&r" (old), "=m" (*p)
	: "r" (mask), "r" (p), "m" (*p)
	: "cc");
}
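/*
 * Worked example of the index math used in all of these routines:
 * for nr = 37, nr >> 5 = 1 selects the second 32-bit word of the
 * bitmap, and mask = 1 << (37 & 0x1f) = 1 << 5 = 0x20 picks the
 * bit within that word.
 */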
/*
 * Atomically set bit nr in the bitmap at addr and return its old value.
 */
int test_and_set_bit(int nr, volatile void *addr)
{
	unsigned int old, t;
	unsigned int mask = 1 << (nr & 0x1f);
	volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);

	__asm__ __volatile__(SMP_WMB "\
1:	lwarx	%0,0,%4
	or	%1,%0,%3
	stwcx.	%1,0,%4
	bne	1b"
	SMP_MB
	: "=&r" (old), "=&r" (t), "=m" (*p)
	: "r" (mask), "r" (p), "m" (*p)
	: "cc");

	return (old & mask) != 0;
}
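/*
 * Hedged usage sketch: test_and_set_bit() is the usual way to claim a
 * flag atomically.  The names below (MY_BUSY, my_flags, my_try_claim)
 * are illustrative, not from this file.
 */
#define MY_BUSY	0
static unsigned long my_flags;

static int my_try_claim(void)
{
	/* the old bit was 0 -> we just claimed the flag;
	 * the old bit was 1 -> somebody else already holds it */
	return !test_and_set_bit(MY_BUSY, &my_flags);
}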
/*
 * Atomically clear bit nr in the bitmap at addr and return its old value.
 */
int test_and_clear_bit(int nr, volatile void *addr)
{
	unsigned int old, t;
	unsigned int mask = 1 << (nr & 0x1f);
	volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);

	__asm__ __volatile__(SMP_WMB "\
1:	lwarx	%0,0,%4
	andc	%1,%0,%3
	stwcx.	%1,0,%4
	bne	1b"
	SMP_MB
	: "=&r" (old), "=&r" (t), "=m" (*p)
	: "r" (mask), "r" (p), "m" (*p)
	: "cc");

	return (old & mask) != 0;
}
/*
 * Atomically toggle bit nr in the bitmap at addr and return its old value.
 */
int test_and_change_bit(int nr, volatile void *addr)
{
	unsigned int old, t;
	unsigned int mask = 1 << (nr & 0x1f);
	volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);

	__asm__ __volatile__(SMP_WMB "\
1:	lwarx	%0,0,%4
	xor	%1,%0,%3
	stwcx.	%1,0,%4
	bne	1b"
	SMP_MB
	: "=&r" (old), "=&r" (t), "=m" (*p)
	: "r" (mask), "r" (p), "m" (*p)
	: "cc");

	return (old & mask) != 0;
}
#endif /* !__INLINE_BITOPS */