MOXA linux-2.6.x / linux-2.6.19-uc1 from UC-7110-LX-BOOTLOADER-1.9_VERSION-4.2.tgz
[linux-2.6.19-moxart.git] / include / asm-nios2nommu / system.h
blob a6ea4c877b3ebdfae68ce54dc3040c38dd9f677d
/*
 * Taken from the m68k.
 *
 * Copyright (C) 2004, Microtronix Datacom Ltd.
 * All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, GOOD TITLE or
 * NON INFRINGEMENT. See the GNU General Public License for more
 * details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#ifndef _NIOS2NOMMU_SYSTEM_H
#define _NIOS2NOMMU_SYSTEM_H

#include <linux/linkage.h>
#include <asm/segment.h>
#include <asm/entry.h>
#include <asm/nios.h>
/*
 * switch_to(n) should switch tasks to task ptr, first checking that
 * ptr isn't the current task, in which case it does nothing. This
 * also clears the TS-flag if the task we switched to used the math
 * co-processor most recently.
 */
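/*
 * Note (not in the original header): resume() is implemented in
 * assembly elsewhere in this port.  Judging from the inline asm
 * below, its convention appears to be: prev in r4, next in r5, and
 * the task actually switched away from handed back in r4.
 */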
asmlinkage void resume(void);

#define switch_to(prev,next,last)			\
{							\
	void *_last;					\
	__asm__ __volatile__(				\
		"mov r4, %1\n"				\
		"mov r5, %2\n"				\
		"call resume\n"				\
		"mov %0, r4\n"				\
		: "=r" (_last)				\
		: "r" (prev), "r" (next)		\
		: "r4", "r5", "r7", "r8", "ra");	\
	(last) = _last;					\
}
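/*
 * Note (not in the original header): the helpers below read-modify-
 * write the Nios II status control register.  Bit 0 is PIE (processor
 * interrupt enable), the same bit NIOS2_STATUS_PIE_MSK refers to in
 * irqs_disabled() further down.
 */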
#define local_irq_enable() __asm__ __volatile__ (	\
	"rdctl r8, status\n"				\
	"ori r8, r8, 1\n"				\
	"wrctl status, r8\n"				\
	: : : "r8")

#define local_irq_disable() __asm__ __volatile__ (	\
	"rdctl r8, status\n"				\
	"andi r8, r8, 0xfffe\n"				\
	"wrctl status, r8\n"				\
	: : : "r8")

#define local_save_flags(x) __asm__ __volatile__ (	\
	"rdctl r8, status\n"				\
	"mov %0, r8\n"					\
	: "=r" (x) : : "r8", "memory")

#define local_irq_restore(x) __asm__ __volatile__ (	\
	"mov r8, %0\n"					\
	"wrctl status, r8\n"				\
	: : "r" (x) : "memory")

/* For spinlocks etc */
#define local_irq_save(x) do { local_save_flags(x); local_irq_disable(); } while (0)
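/*
 * Illustrative usage only (not part of the original header): the
 * usual pattern for a short critical section on this UP port:
 *
 *	unsigned long flags;
 *
 *	local_irq_save(flags);
 *	... touch data shared with an interrupt handler ...
 *	local_irq_restore(flags);
 */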
#define irqs_disabled()					\
({							\
	unsigned long flags;				\
	local_save_flags(flags);			\
	((flags & NIOS2_STATUS_PIE_MSK) == 0x0);	\
})
#define iret() __asm__ __volatile__ ("eret": : :"memory", "ea")
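/*
 * Note (not in the original header): eret returns from exception
 * processing on Nios II; it restores status from estatus and jumps
 * to the address held in ea (r29), hence the ea clobber above.
 */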
/*
 * Force strict CPU ordering.
 * Not really required on the m68k this was taken from, nor here:
 * the barriers below are compiler-only.
 */
#define nop()	asm volatile ("nop"::)
#define mb()	asm volatile ("" : : :"memory")
#define rmb()	asm volatile ("" : : :"memory")
#define wmb()	asm volatile ("" : : :"memory")
#define set_rmb(var, value)	do { xchg(&var, value); } while (0)
#define set_mb(var, value)	set_rmb(var, value)
#define set_wmb(var, value)	do { var = value; wmb(); } while (0)
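/*
 * Note (not in the original header): mb()/rmb()/wmb() above expand to
 * pure compiler barriers, presumably sufficient on this in-order,
 * uniprocessor configuration where no hardware barrier instruction
 * is needed.
 */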
#ifdef CONFIG_SMP
#define smp_mb()	mb()
#define smp_rmb()	rmb()
#define smp_wmb()	wmb()
#define smp_read_barrier_depends()	read_barrier_depends()
#else
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#define smp_read_barrier_depends()	do { } while(0)
#endif
#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
#define tas(ptr) (xchg((ptr),1))

struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((volatile struct __xchg_dummy *)(x))
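/*
 * Note (not in the original header): casting ptr to a pointer to the
 * oversized __xchg_dummy struct is the classic trick, borrowed from
 * the i386 port, that lets the "m" constraints in __xchg() tell GCC
 * the asm may touch the whole object behind ptr without GCC knowing
 * its actual size.
 */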
static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
{
	unsigned long tmp, flags;

	local_irq_save(flags);

	switch (size) {
	case 1:
		__asm__ __volatile__(
			"ldb %0, %2\n"
			"stb %1, %2\n"
			: "=&r" (tmp) : "r" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 2:
		__asm__ __volatile__(
			"ldh %0, %2\n"
			"sth %1, %2\n"
			: "=&r" (tmp) : "r" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 4:
		__asm__ __volatile__(
			"ldw %0, %2\n"
			"stw %1, %2\n"
			: "=&r" (tmp) : "r" (x), "m" (*__xg(ptr)) : "memory");
		break;
	}

	local_irq_restore(flags);
	return tmp;
}
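/*
 * Note (not in the original header): atomicity above comes entirely
 * from disabling local interrupts, which suffices only because this
 * is a uniprocessor port.  Illustrative use of tas() as a trivial
 * test-and-set lock:
 *
 *	static volatile int lock;
 *
 *	while (tas(&lock))	... old value of 1 means already held ...
 *		;
 *	... critical section ...
 *	lock = 0;
 */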
/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
#define __HAVE_ARCH_CMPXCHG 1
static __inline__ unsigned long
cmpxchg(volatile int *p, int old, int new)
{
	unsigned long flags;
	int prev;

	local_irq_save(flags);
	if ((prev = *p) == old)
		*p = new;
	local_irq_restore(flags);
	return prev;
}
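/*
 * Illustrative usage only (not part of the original header): the
 * standard retry loop for an atomic read-modify-write built on
 * cmpxchg(), here incrementing a hypothetical counter v:
 *
 *	int old;
 *	do {
 *		old = v;
 *	} while (cmpxchg(&v, old, old + 1) != old);
 */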
#endif /* _NIOS2NOMMU_SYSTEM_H */