Linux-2.6.12-rc2
[linux-2.6/kvm.git] arch/ppc/kernel/bitops.c
/*
 * Copyright (C) 1996 Paul Mackerras.
 */

#include <linux/kernel.h>
#include <linux/bitops.h>

/*
 * If the bitops are not inlined in bitops.h, they are defined here.
 *  -- paulus
 */
#if !__INLINE_BITOPS
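/*
 * Atomically set bit 'nr' in the bitmap at 'addr': compute the mask and
 * the word that holds the bit, then OR the mask in under a lwarx/stwcx.
 * reservation loop, retrying whenever the store-conditional fails.
 * PPC405_ERR77() inserts a workaround for a PPC405 erratum, and the
 * SMP_WMB/SMP_MB macros supply memory barriers on SMP builds.
 */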
void set_bit(int nr, volatile void * addr)
{
	unsigned long old;
	unsigned long mask = 1 << (nr & 0x1f);
	unsigned long *p = ((unsigned long *)addr) + (nr >> 5);

	__asm__ __volatile__(SMP_WMB "\n\
1:	lwarx	%0,0,%3 \n\
	or	%0,%0,%2 \n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne	1b"
	SMP_MB
	: "=&r" (old), "=m" (*p)
	: "r" (mask), "r" (p), "m" (*p)
	: "cc" );
}
void clear_bit(int nr, volatile void *addr)
{
	unsigned long old;
	unsigned long mask = 1 << (nr & 0x1f);
	unsigned long *p = ((unsigned long *)addr) + (nr >> 5);

	__asm__ __volatile__(SMP_WMB "\n\
1:	lwarx	%0,0,%3 \n\
	andc	%0,%0,%2 \n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne	1b"
	SMP_MB
	: "=&r" (old), "=m" (*p)
	: "r" (mask), "r" (p), "m" (*p)
	: "cc");
}
void change_bit(int nr, volatile void *addr)
{
	unsigned long old;
	unsigned long mask = 1 << (nr & 0x1f);
	unsigned long *p = ((unsigned long *)addr) + (nr >> 5);

	__asm__ __volatile__(SMP_WMB "\n\
1:	lwarx	%0,0,%3 \n\
	xor	%0,%0,%2 \n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne	1b"
	SMP_MB
	: "=&r" (old), "=m" (*p)
	: "r" (mask), "r" (p), "m" (*p)
	: "cc");
}
int test_and_set_bit(int nr, volatile void *addr)
{
	unsigned int old, t;
	unsigned int mask = 1 << (nr & 0x1f);
	volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);

	__asm__ __volatile__(SMP_WMB "\n\
1:	lwarx	%0,0,%4 \n\
	or	%1,%0,%3 \n"
	PPC405_ERR77(0,%4)
"	stwcx.	%1,0,%4 \n\
	bne	1b"
	SMP_MB
	: "=&r" (old), "=&r" (t), "=m" (*p)
	: "r" (mask), "r" (p), "m" (*p)
	: "cc");

	return (old & mask) != 0;
}
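
/*
 * Atomically clear bit 'nr' and return its previous value: 1 if the bit
 * was set before the clear, 0 otherwise.
 */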
int test_and_clear_bit(int nr, volatile void *addr)
{
	unsigned int old, t;
	unsigned int mask = 1 << (nr & 0x1f);
	volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);

	__asm__ __volatile__(SMP_WMB "\n\
1:	lwarx	%0,0,%4 \n\
	andc	%1,%0,%3 \n"
	PPC405_ERR77(0,%4)
"	stwcx.	%1,0,%4 \n\
	bne	1b"
	SMP_MB
	: "=&r" (old), "=&r" (t), "=m" (*p)
	: "r" (mask), "r" (p), "m" (*p)
	: "cc");

	return (old & mask) != 0;
}
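
/*
 * Atomically toggle bit 'nr' and return its previous value: 1 if the bit
 * was set before the flip, 0 otherwise.
 */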
int test_and_change_bit(int nr, volatile void *addr)
{
	unsigned int old, t;
	unsigned int mask = 1 << (nr & 0x1f);
	volatile unsigned int *p = ((volatile unsigned int *)addr) + (nr >> 5);

	__asm__ __volatile__(SMP_WMB "\n\
1:	lwarx	%0,0,%4 \n\
	xor	%1,%0,%3 \n"
	PPC405_ERR77(0,%4)
"	stwcx.	%1,0,%4 \n\
	bne	1b"
	SMP_MB
	: "=&r" (old), "=&r" (t), "=m" (*p)
	: "r" (mask), "r" (p), "m" (*p)
	: "cc");

	return (old & mask) != 0;
}

#endif /* !__INLINE_BITOPS */
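
/*
 * Usage sketch, not part of the original file: a minimal illustration of
 * how callers typically combine these primitives to claim and release a
 * "busy" flag.  EXAMPLE_BUSY_BIT, example_flags and the two helpers are
 * hypothetical names invented for this sketch; the block is kept under
 * "#if 0" so it is never compiled.
 */
#if 0
#define EXAMPLE_BUSY_BIT	0

static unsigned long example_flags;

/* Returns 1 when the bit was previously clear and has now been claimed. */
static int example_try_claim(void)
{
	return !test_and_set_bit(EXAMPLE_BUSY_BIT, &example_flags);
}

/* Drop the claim taken by example_try_claim(). */
static void example_release(void)
{
	clear_bit(EXAMPLE_BUSY_BIT, &example_flags);
}
#endif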