[netdrvr] CS89X0: Add cleanup for dma after fail
[linux-2.6/mini2440.git] / include / asm-x86 / calling.h
blobf13e62e2cb3e75160297459ef182921b170d0850
/*
 * Some macros to handle stack frames in assembly.
 */
/*
 * Byte offsets of each saved register within the full stack frame
 * built by SAVE_ARGS + SAVE_REST (lowest address first).  The CPU
 * exception frame (RIP..SS) sits above the software-saved area.
 */
#define R15 0
#define R14 8
#define R13 16
#define R12 24
#define RBP 32
#define RBX 40
/* arguments: interrupts/non tracing syscalls only save up to here */
#define R11 48
#define R10 56
#define R9 64
#define R8 72
#define RAX 80
#define RCX 88
#define RDX 96
#define RSI 104
#define RDI 112
#define ORIG_RAX 120       /* + error_code */
/* end of arguments */
/* cpu exception frame or undefined in case of fast syscall. */
#define RIP 128
#define CS 136
#define EFLAGS 144
#define RSP 152
#define SS 160

/* offset of the first argument slot (R11) within the frame */
#define ARGOFFSET R11
#define SWFRAME ORIG_RAX
/*
 * SAVE_ARGS: allocate a 9*8-byte (+\addskip) area on the stack and
 * store the argument/scratch registers into it, matching the offsets
 * defined above (rdi highest at 8*8, r11 lowest at 0).
 *
 * \addskip    - extra bytes to allocate below the register area
 * \norcx      - non-zero: skip saving %rcx (its slot is left untouched)
 * \nor891011  - non-zero: skip saving %r8-%r11
 *
 * Every store is paired with a CFI_REL_OFFSET annotation so the DWARF
 * unwinder knows where each register was spilled; the initial subq is
 * matched by CFI_ADJUST_CFA_OFFSET.
 */
.macro SAVE_ARGS addskip=0, norcx=0, nor891011=0
	subq	$9*8+\addskip, %rsp
	CFI_ADJUST_CFA_OFFSET	9*8+\addskip
	movq	%rdi, 8*8(%rsp)
	CFI_REL_OFFSET	rdi, 8*8
	movq	%rsi, 7*8(%rsp)
	CFI_REL_OFFSET	rsi, 7*8
	movq	%rdx, 6*8(%rsp)
	CFI_REL_OFFSET	rdx, 6*8
	.if \norcx
	.else
	movq	%rcx, 5*8(%rsp)
	CFI_REL_OFFSET	rcx, 5*8
	.endif
	movq	%rax, 4*8(%rsp)
	CFI_REL_OFFSET	rax, 4*8
	.if \nor891011
	.else
	movq	%r8, 3*8(%rsp)
	CFI_REL_OFFSET	r8, 3*8
	movq	%r9, 2*8(%rsp)
	CFI_REL_OFFSET	r9, 2*8
	movq	%r10, 1*8(%rsp)
	CFI_REL_OFFSET	r10, 1*8
	movq	%r11, (%rsp)
	CFI_REL_OFFSET	r11, 0*8
	.endif
.endm
/* size of the register area SAVE_ARGS allocates (excluding \addskip) */
#define ARG_SKIP 9*8

/*
 * RESTORE_ARGS: reload the registers saved by SAVE_ARGS (in reverse
 * order: r11 first, rdi last) and pop the frame.
 *
 * \skiprax/\skiprcx/\skipr11/\skipr8910/\skiprdx
 *             - non-zero: leave that register (group) unrestored
 * \addskip    - extra bytes to pop, mirroring SAVE_ARGS \addskip
 *
 * %rsi and %rdi are always restored.  Each reload is paired with a
 * CFI_RESTORE annotation; the final addq undoes the CFA adjustment.
 */
.macro RESTORE_ARGS skiprax=0, addskip=0, skiprcx=0, skipr11=0, \
		    skipr8910=0, skiprdx=0
	.if \skipr11
	.else
	movq	(%rsp), %r11
	CFI_RESTORE r11
	.endif
	.if \skipr8910
	.else
	movq	1*8(%rsp), %r10
	CFI_RESTORE r10
	movq	2*8(%rsp), %r9
	CFI_RESTORE r9
	movq	3*8(%rsp), %r8
	CFI_RESTORE r8
	.endif
	.if \skiprax
	.else
	movq	4*8(%rsp), %rax
	CFI_RESTORE rax
	.endif
	.if \skiprcx
	.else
	movq	5*8(%rsp), %rcx
	CFI_RESTORE rcx
	.endif
	.if \skiprdx
	.else
	movq	6*8(%rsp), %rdx
	CFI_RESTORE rdx
	.endif
	movq	7*8(%rsp), %rsi
	CFI_RESTORE rsi
	movq	8*8(%rsp), %rdi
	CFI_RESTORE rdi
	.if ARG_SKIP+\addskip > 0
	addq	$ARG_SKIP+\addskip, %rsp
	CFI_ADJUST_CFA_OFFSET	-(ARG_SKIP+\addskip)
	.endif
.endm
/*
 * LOAD_ARGS: reload the argument registers from the stack without
 * adjusting %rsp.  \offset is the distance from %rsp to the R11 save
 * slot (the lowest argument slot; see the offset defines above).
 *
 * Note: %rax is deliberately loaded from \offset+72, which is the
 * ORIG_RAX slot (ORIG_RAX - R11 == 72), NOT from the RAX slot at
 * \offset+32, which is never reloaded here.  NOTE(review): presumably
 * so that a tracer's update to orig_rax (the syscall number) takes
 * effect on syscall restart — confirm against the entry_64.S callers.
 */
.macro LOAD_ARGS offset
	movq \offset(%rsp), %r11
	movq \offset+8(%rsp), %r10
	movq \offset+16(%rsp), %r9
	movq \offset+24(%rsp), %r8
	movq \offset+40(%rsp), %rcx
	movq \offset+48(%rsp), %rdx
	movq \offset+56(%rsp), %rsi
	movq \offset+64(%rsp), %rdi
	movq \offset+72(%rsp), %rax
.endm
/* size of the callee-saved register area allocated by SAVE_REST */
#define REST_SKIP 6*8

/*
 * SAVE_REST: allocate 6*8 bytes below the argument area and store the
 * callee-saved registers rbx, rbp, r12-r15 (rbx highest at 5*8, r15
 * lowest at 0), matching the R15..RBX offsets defined above.  Each
 * store carries a CFI_REL_OFFSET annotation for the unwinder.
 */
.macro SAVE_REST
	subq	$REST_SKIP, %rsp
	CFI_ADJUST_CFA_OFFSET	REST_SKIP
	movq	%rbx, 5*8(%rsp)
	CFI_REL_OFFSET	rbx, 5*8
	movq	%rbp, 4*8(%rsp)
	CFI_REL_OFFSET	rbp, 4*8
	movq	%r12, 3*8(%rsp)
	CFI_REL_OFFSET	r12, 3*8
	movq	%r13, 2*8(%rsp)
	CFI_REL_OFFSET	r13, 2*8
	movq	%r14, 1*8(%rsp)
	CFI_REL_OFFSET	r14, 1*8
	movq	%r15, (%rsp)
	CFI_REL_OFFSET	r15, 0*8
.endm
/*
 * RESTORE_REST: reload the callee-saved registers stored by SAVE_REST
 * (in reverse order: r15 first, rbx last) and pop the 6*8-byte area.
 * Each reload carries a CFI_RESTORE annotation; the final addq undoes
 * the CFA adjustment made in SAVE_REST.
 */
.macro RESTORE_REST
	movq	(%rsp), %r15
	CFI_RESTORE r15
	movq	1*8(%rsp), %r14
	CFI_RESTORE r14
	movq	2*8(%rsp), %r13
	CFI_RESTORE r13
	movq	3*8(%rsp), %r12
	CFI_RESTORE r12
	movq	4*8(%rsp), %rbp
	CFI_RESTORE rbp
	movq	5*8(%rsp), %rbx
	CFI_RESTORE rbx
	addq	$REST_SKIP, %rsp
	CFI_ADJUST_CFA_OFFSET	-(REST_SKIP)
.endm
/*
 * SAVE_ALL: save the complete software register frame — the argument/
 * scratch registers (SAVE_ARGS with all defaults) followed by the
 * callee-saved registers (SAVE_REST).
 */
.macro SAVE_ALL
	SAVE_ARGS
	SAVE_REST
.endm
/*
 * RESTORE_ALL: undo SAVE_ALL.  Restores the callee-saved registers
 * first, then the argument registers; the leading positional 0 is
 * RESTORE_ARGS' skiprax (i.e. %rax IS restored) and \addskip is
 * forwarded as the extra stack adjustment.
 */
.macro RESTORE_ALL addskip=0
	RESTORE_REST
	RESTORE_ARGS 0, \addskip
.endm
/*
 * icebp: emit the one-byte 0xf1 opcode (ICEBP/INT1) directly, since
 * assemblers do not all accept a mnemonic for it.  Historically the
 * in-circuit-emulator breakpoint instruction.
 */
.macro icebp
	.byte 0xf1
.endm