4 * Copyright (C) 2004, Microtronix Datacom Ltd.
8 * This program is free software; you can redistribute it and/or modify
9 * it under the terms of the GNU General Public License as published by
10 * the Free Software Foundation; either version 2 of the License, or
11 * (at your option) any later version.
13 * This program is distributed in the hope that it will be useful, but
14 * WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, GOOD TITLE or
 * NON INFRINGEMENT. See the GNU General Public License for more
 * details.
19 * You should have received a copy of the GNU General Public License
20 * along with this program; if not, write to the Free Software
21 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
25 #ifndef _NIOS2NOMMU_SYSTEM_H
26 #define _NIOS2NOMMU_SYSTEM_H
28 #include <linux/linkage.h>
29 #include <asm/segment.h>
30 #include <asm/entry.h>
34 * switch_to(n) should switch tasks to task ptr, first checking that
35 * ptr isn't the current task, in which case it does nothing. This
36 * also clears the TS-flag if the task we switched to has used the
37 * math co-processor latest.
42 asmlinkage
void resume(void);
43 #define switch_to(prev,next,last) \
46 __asm__ __volatile__( \
52 : "r" (prev), "r" (next) \
53 : "r4","r5","r7","r8","ra"); \
/* Enable hardware interrupts: set the PIE bit (bit 0) of the status
 * control register.  r8 is used as a scratch register and declared
 * clobbered so the compiler does not keep live values there. */
#define local_irq_enable() __asm__ __volatile__ (	\
	"rdctl	r8, status\n"				\
	"ori	r8, r8, 1\n"				\
	"wrctl	status, r8\n"				\
	: : : "r8", "memory")
/* Disable hardware interrupts: clear the PIE bit (bit 0) of the
 * status control register, using r8 as scratch. */
#define local_irq_disable() __asm__ __volatile__ (	\
	"rdctl	r8, status\n"				\
	"andi	r8, r8, 0xfffe\n"			\
	"wrctl	status, r8\n"				\
	: : : "r8", "memory")
/* Snapshot the status control register (IRQ state) into x. */
#define local_save_flags(x) __asm__ __volatile__ (	\
	"rdctl	r8, status\n"				\
	"mov	%0, r8\n"				\
	: "=r" (x) : : "r8", "memory")
/* Restore a status-register snapshot previously taken with
 * local_save_flags().  r8 is added to the clobber list: the asm writes
 * it, so omitting it could silently corrupt a live compiler value. */
#define local_irq_restore(x) __asm__ __volatile__ (	\
	"mov	r8, %0\n"				\
	"wrctl	status, r8\n"				\
	: : "r" (x) : "r8", "memory")
/* For spinlocks etc: save the current IRQ state into x, then disable
 * interrupts.  Pair with local_irq_restore(x). */
#define local_irq_save(x) do { local_save_flags(x); local_irq_disable(); } while (0)
/* Nonzero when interrupts are disabled, i.e. the PIE bit of the saved
 * status word is clear.  Uses a GCC statement expression so the macro
 * yields a value. */
#define irqs_disabled()					\
({							\
	unsigned long flags;				\
	local_save_flags(flags);			\
	((flags & NIOS2_STATUS_PIE_MSK) == 0x0);	\
})
89 #define iret() __asm__ __volatile__ ("eret": : :"memory", "ea")
/*
 * Force strict CPU ordering.  This port only emits compiler barriers
 * (empty asm with a "memory" clobber); set_rmb/set_mb additionally go
 * through xchg() so the store is atomic with respect to interrupts.
 */
#define nop() asm volatile ("nop"::)
#define mb() asm volatile ("" : : :"memory")
#define rmb() asm volatile ("" : : :"memory")
#define wmb() asm volatile ("" : : :"memory")
#define set_rmb(var, value) do { xchg(&var, value); } while (0)
#define set_mb(var, value) set_rmb(var, value)
#define set_wmb(var, value) do { var = value; wmb(); } while (0)
/*
 * SMP variants.  Without the conditional the two sets of definitions
 * redefine each other; select the real barriers only when CONFIG_SMP
 * is set, and fall back to plain compiler barriers on UP.
 */
#ifdef CONFIG_SMP
#define smp_mb()	mb()
#define smp_rmb()	rmb()
#define smp_wmb()	wmb()
#define smp_read_barrier_depends()	read_barrier_depends()
#else
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#define smp_read_barrier_depends()	do { } while(0)
#endif
/*
 * xchg(ptr, x): atomically exchange *ptr with x and return the old
 * value; size is dispatched to __xchg() below.  tas(ptr) is
 * test-and-set: atomically store 1, return the previous value.
 */
#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
#define tas(ptr) (xchg((ptr),1))

/*
 * __xg() widens the "m" constraint to a large dummy object so GCC
 * treats the whole pointed-to region as read/written by the asm and
 * does not cache it in registers.
 */
struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((volatile struct __xchg_dummy *)(x))
121 static inline unsigned long __xchg(unsigned long x
, volatile void * ptr
, int size
)
123 unsigned long tmp
, flags
;
125 local_irq_save(flags
);
129 __asm__
__volatile__( \
132 : "=&r" (tmp
) : "r" (x
), "m" (*__xg(ptr
)) : "memory");
135 __asm__
__volatile__( \
138 : "=&r" (tmp
) : "r" (x
), "m" (*__xg(ptr
)) : "memory");
141 __asm__
__volatile__( \
144 : "=&r" (tmp
) : "r" (x
), "m" (*__xg(ptr
)) : "memory");
147 local_irq_restore(flags
);
/*
 * Atomic compare and exchange.  Compare OLD with *p; if identical,
 * store NEW in *p.  Return the initial value of *p — success is
 * indicated by comparing the return value with OLD.  Atomicity on
 * this UP port is obtained by disabling interrupts around the
 * read-compare-write sequence.
 */
#define __HAVE_ARCH_CMPXCHG 1

static __inline__ unsigned long
cmpxchg(volatile int *p, int old, int new)
{
	unsigned long flags;
	int prev;

	local_irq_save(flags);
	if ((prev = *p) == old)
		*p = new;
	local_irq_restore(flags);
	return prev;
}
171 #endif /* _NIOS2NOMMU_SYSTEM_H */