#ifndef __M68KNOMMU_ENTRY_H
#define __M68KNOMMU_ENTRY_H

#include <asm/setup.h>
#include <asm/page.h>

/*
 * Stack layout in 'ret_from_exception':
 *
 * This allows access to the syscall arguments in registers d1-d5
 *
 *	 0(sp) - d1
 *	 4(sp) - d2
 *	 8(sp) - d3
 *	 C(sp) - d4
 *	10(sp) - d5
 *	14(sp) - a0
 *	18(sp) - a1
 *	1C(sp) - a2
 *	20(sp) - d0
 *	24(sp) - orig_d0
 *	28(sp) - stack adjustment
 *	2C(sp) - [ sr              ] [ format & vector ]
 *	2E(sp) - [ pc-hiword       ] [ sr              ]
 *	30(sp) - [ pc-loword       ] [ pc-hiword       ]
 *	32(sp) - [ format & vector ] [ pc-loword       ]
 *		  ^^^^^^^^^^^^^^^^^   ^^^^^^^^^^^^^^^^^
 *		        M68K              COLDFIRE
 */
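
/*
 * For reference, the C-side view of this frame is struct pt_regs from
 * <asm/ptrace.h>.  A rough sketch only (the authoritative definition,
 * including the exact sr/pc/format field ordering for classic m68k
 * versus ColdFire, lives in ptrace.h):
 *
 *	struct pt_regs {
 *		long d1, d2, d3, d4, d5;
 *		long a0, a1, a2;
 *		long d0;
 *		long orig_d0;
 *		long stkadj;
 *		// sr, pc and format/vector follow, laid out as drawn above
 *	};
 */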

#define ALLOWINT (~0x700)
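/*
 * ~0x700 clears the three interrupt-priority (IPL) bits of the status
 * register.  Entry code can use it along the lines of
 * "andw #ALLOWINT,%sr" to drop the IPL back to 0 and re-enable
 * interrupts (illustrative; see the platform entry code for actual uses).
 */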

#ifdef __ASSEMBLY__

#define SWITCH_STACK_SIZE (6*4+4)	/* Includes return address */
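/*
 * SAVE_SWITCH_STACK pushes six longword registers (%a3-%a6/%d6-%d7), and
 * the caller's return address is already on the stack beneath them:
 * 6*4 + 4 = 28 bytes in total.
 */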

/*
 * This defines the normal kernel pt-regs layout.
 *
 * Registers a2-a6 and d6-d7 are preserved by C code; the kernel doesn't
 * mess with usp unless it needs to.
 */

#ifdef CONFIG_COLDFIRE
#ifdef CONFIG_COLDFIRE_SW_A7
/*
 * This is made a little more tricky on older ColdFires. There are no
 * separate supervisor and user stack pointers, so a usp has to be
 * constructed in software. While doing this we need to disable
 * interrupts, otherwise bad things will happen.
 */
.globl sw_usp
.globl sw_ksp
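
/*
 * sw_usp and sw_ksp hold the software-maintained user and kernel stack
 * pointers (they are defined in the ColdFire platform code, not in this
 * header; the macros below only reference them).
 */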

.macro SAVE_ALL
	move	#0x2700,%sr		/* disable intrs */
	btst	#5,%sp@(2)		/* from user? */
	bnes	6f			/* no, skip */
	movel	%sp,sw_usp		/* save user sp */
	addql	#8,sw_usp		/* remove exception */
	movel	sw_ksp,%sp		/* kernel sp */
	subql	#8,%sp			/* room for exception */
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	lea	%sp@(-32),%sp		/* space for 8 regs */
	moveml	%d1-%d5/%a0-%a2,%sp@
	movel	sw_usp,%a0		/* get usp */
	movel	%a0@-,%sp@(PT_OFF_PC)	/* copy exception program counter */
	movel	%a0@-,%sp@(PT_OFF_FORMATVEC) /* copy exception format/vector/sr */
	bra	7f
6:
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	lea	%sp@(-32),%sp		/* space for 8 regs */
	moveml	%d1-%d5/%a0-%a2,%sp@
7:
.endm

.macro RESTORE_USER
	move	#0x2700,%sr		/* disable intrs */
	movel	sw_usp,%a0		/* get usp */
	movel	%sp@(PT_OFF_PC),%a0@-	/* copy exception program counter */
	movel	%sp@(PT_OFF_FORMATVEC),%a0@- /* copy exception format/vector/sr */
	moveml	%sp@,%d1-%d5/%a0-%a2
	lea	%sp@(32),%sp		/* space for 8 regs */
	movel	%sp@+,%d0
	addql	#4,%sp			/* orig d0 */
	addl	%sp@+,%sp		/* stkadj */
	addql	#8,%sp			/* remove exception */
	movel	%sp,sw_ksp		/* save ksp */
	subql	#8,sw_usp		/* set exception */
	movel	sw_usp,%sp		/* restore usp */
	rte
.endm

.macro RDUSP
	movel	sw_usp,%a2
.endm

.macro WRUSP
	movel	%a0,sw_usp
.endm
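
/*
 * Usage sketch (illustrative only, not lifted from the real entry code;
 * some_handler and do_handler_work are made-up names): an exception or
 * interrupt handler built on these macros looks roughly like
 *
 *	ENTRY(some_handler)
 *		SAVE_ALL			| build struct pt_regs
 *		movel	%sp,%sp@-		| pass pt_regs pointer to C
 *		jsr	do_handler_work		| hypothetical C handler
 *		addql	#4,%sp
 *		jmp	ret_from_exception	| ends in RESTORE_USER/rte
 *
 * PT_OFF_PC and PT_OFF_FORMATVEC used above are offsets into struct
 * pt_regs generated by asm-offsets.
 */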

#else /* !CONFIG_COLDFIRE_SW_A7 */
/*
 * Modern ColdFire parts have separate supervisor and user stack
 * pointers. Simple load and restore macros for this case.
 */
.macro SAVE_ALL
	move	#0x2700,%sr		/* disable intrs */
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	lea	%sp@(-32),%sp		/* space for 8 regs */
	moveml	%d1-%d5/%a0-%a2,%sp@
.endm

.macro RESTORE_USER
	moveml	%sp@,%d1-%d5/%a0-%a2
	lea	%sp@(32),%sp		/* space for 8 regs */
	movel	%sp@+,%d0
	addql	#4,%sp			/* orig d0 */
	addl	%sp@+,%sp		/* stkadj */
	rte
.endm

.macro RDUSP
	/*move	%usp,%a2*/
	.word	0x4e6a
.endm

.macro WRUSP
	/*move	%a0,%usp*/
	.word	0x4e60
.endm
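
/*
 * 0x4e6a and 0x4e60 are the raw opcodes for the privileged
 * "move %usp,%a2" and "move %a0,%usp" instructions.  They are emitted as
 * .word constants, presumably because the assembler does not accept the
 * mnemonics for every ColdFire ISA selection.
 */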

#endif /* !CONFIG_COLDFIRE_SW_A7 */

.macro SAVE_SWITCH_STACK
	lea	%sp@(-24),%sp		/* 6 regs */
	moveml	%a3-%a6/%d6-%d7,%sp@
.endm

.macro RESTORE_SWITCH_STACK
	moveml	%sp@,%a3-%a6/%d6-%d7
	lea	%sp@(24),%sp		/* 6 regs */
.endm

#else /* !CONFIG_COLDFIRE */

/*
 * Standard 68k interrupt entry and exit macros.
 */
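
/*
 * Note: on these parts the hardware pushes the exception frame (sr, pc
 * and, where present, format/vector) onto the supervisor stack itself,
 * and moveml supports predecrement addressing, so the macros can simply
 * push the remaining pt_regs fields with "moveml ...,%sp@-".  The
 * ColdFire variants above adjust %sp with lea first because ColdFire
 * moveml lacks the predecrement mode.
 */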

.macro SAVE_ALL
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	moveml	%d1-%d5/%a0-%a2,%sp@-
.endm

.macro RESTORE_ALL
	moveml	%sp@+,%a0-%a2/%d1-%d5
	movel	%sp@+,%d0
	addql	#4,%sp			/* orig d0 */
	addl	%sp@+,%sp		/* stkadj */
	rte
.endm

.macro SAVE_SWITCH_STACK
	moveml	%a3-%a6/%d6-%d7,%sp@-
.endm

.macro RESTORE_SWITCH_STACK
	moveml	%sp@+,%a3-%a6/%d6-%d7
.endm

#endif /* !CONFIG_COLDFIRE */
#endif /* __ASSEMBLY__ */
#endif /* __M68KNOMMU_ENTRY_H */