/*
 * include/asm-x86_64/calling.h
 * [PATCH] x86-64 architecture specific sync for 2.5.8
 * (history.git blob d53a8e11c7b0b546e11bf44e10df9b786012052e)
 */
/*
 * Some macros to handle stack frames in assembly.
 */

#include <linux/config.h>

/*
 * Byte offsets of the saved registers within the stack frame built by
 * SAVE_ALL below (presumably mirroring struct pt_regs -- verify against
 * the arch headers).  Every slot is 8 bytes wide.
 */
#define R15 0
#define R14 8
#define R13 16
#define R12 24
#define RBP 32		/* fixed: was 36, which is not 8-aligned and would
			   overlap the RBX slot at 40; slots step by 8 */
#define RBX 40
/* arguments: interrupts/non tracing syscalls only save upto here*/
#define R11 48
#define R10 56
#define R9 64
#define R8 72
#define RAX 80
#define RCX 88
#define RDX 96
#define RSI 104
#define RDI 112
#define ORIG_RAX 120 /* + error_code */
/* end of arguments */
/* cpu exception frame or undefined in case of fast syscall. */
#define RIP 128
#define CS 136
#define EFLAGS 144
#define RSP 152
#define SS 160
/* The argument-register area of the frame starts at the R11 slot. */
#define ARGOFFSET R11
/*
 * Save the caller-clobbered argument registers on the stack in the
 * frame layout given by the offsets at the top of this file (r11 at
 * the lowest address, rdi at the highest of the nine slots).
 * \addskip: extra bytes to reserve below the saved registers.
 * \norcx:   if non-zero, leave the %rcx slot unwritten -- NOTE(review):
 *           presumably the caller (e.g. syscall entry, where %rcx is
 *           clobbered by the syscall instruction) handles it; confirm.
 */
.macro SAVE_ARGS addskip=0,norcx=0
	subq $9*8+\addskip,%rsp		/* reserve 9 slots + extra space */
	movq %rdi,8*8(%rsp)
	movq %rsi,7*8(%rsp)
	movq %rdx,6*8(%rsp)
	.if \norcx
	.else
	movq %rcx,5*8(%rsp)
	.endif
	movq %rax,4*8(%rsp)
	movq %r8,3*8(%rsp)
	movq %r9,2*8(%rsp)
	movq %r10,1*8(%rsp)
	movq %r11,(%rsp)
.endm
/* Size in bytes of the frame reserved by SAVE_ARGS (9 slots). */
#define ARG_SKIP 9*8

/*
 * Restore the argument registers saved by SAVE_ARGS and pop the frame.
 * \skiprax: if non-zero, do not reload %rax (leave its current value,
 *           e.g. a return value, intact).
 * \skiprcx: if non-zero, do not reload %rcx.
 * \addskip: extra bytes to pop beyond ARG_SKIP; must match the
 *           \addskip passed to SAVE_ARGS.
 */
.macro RESTORE_ARGS skiprax=0,addskip=0,skiprcx=0
	movq (%rsp),%r11
	movq 1*8(%rsp),%r10
	movq 2*8(%rsp),%r9
	movq 3*8(%rsp),%r8
	.if \skiprax
	.else
	movq 4*8(%rsp),%rax
	.endif
	.if \skiprcx
	.else
	movq 5*8(%rsp),%rcx
	.endif
	movq 6*8(%rsp),%rdx
	movq 7*8(%rsp),%rsi
	movq 8*8(%rsp),%rdi
	.if ARG_SKIP+\addskip > 0
	addq $ARG_SKIP+\addskip,%rsp	/* pop the argument frame */
	.endif
.endm
/*
 * Reload the argument registers from a SAVE_ARGS-style frame that
 * begins \offset bytes above %rsp, without popping anything.
 * Note the deliberate asymmetry with RESTORE_ARGS: %rax is loaded
 * from \offset+72, which relative to the R11 base slot is the
 * ORIG_RAX offset (120-48=72) -- i.e. the original %rax value -- and
 * the saved %rax slot at \offset+32 is skipped entirely.
 */
.macro LOAD_ARGS offset
	movq \offset(%rsp),%r11
	movq \offset+8(%rsp),%r10
	movq \offset+16(%rsp),%r9
	movq \offset+24(%rsp),%r8
	movq \offset+40(%rsp),%rcx
	movq \offset+48(%rsp),%rdx
	movq \offset+56(%rsp),%rsi
	movq \offset+64(%rsp),%rdi
	movq \offset+72(%rsp),%rax	/* ORIG_RAX slot, not the saved %rax */
.endm
/*
 * Save the remaining (callee-saved) registers rbx, rbp, r12-r15 in
 * six slots below an existing SAVE_ARGS frame, completing the layout
 * described by the offsets at the top of this file (r15 lowest).
 */
.macro SAVE_REST
	subq $6*8,%rsp			/* reserve the six extra slots */
	movq %rbx,5*8(%rsp)
	movq %rbp,4*8(%rsp)
	movq %r12,3*8(%rsp)
	movq %r13,2*8(%rsp)
	movq %r14,1*8(%rsp)
	movq %r15,(%rsp)
.endm
/* Size in bytes of the frame reserved by SAVE_REST (6 slots). */
#define REST_SKIP 6*8

/*
 * Restore the callee-saved registers stored by SAVE_REST and pop
 * their six slots from the stack.
 */
.macro RESTORE_REST
	movq (%rsp),%r15
	movq 1*8(%rsp),%r14
	movq 2*8(%rsp),%r13
	movq 3*8(%rsp),%r12
	movq 4*8(%rsp),%rbp
	movq 5*8(%rsp),%rbx
	addq $REST_SKIP,%rsp		/* pop the callee-saved area */
.endm
/*
 * Save the full register set: argument registers first, then the
 * callee-saved ones, producing the complete frame whose offsets are
 * defined at the top of this file.
 */
.macro SAVE_ALL
	SAVE_ARGS
	SAVE_REST
.endm
/*
 * Undo SAVE_ALL: restore the callee-saved registers, then the
 * argument registers, popping \addskip extra bytes along with the
 * argument frame (forwarded to RESTORE_ARGS).
 */
.macro RESTORE_ALL addskip=0
	RESTORE_REST
	RESTORE_ARGS 0,\addskip
.endm
/* push in order ss, rsp, eflags, cs, rip */
/*
 * Build a fake six-slot exception frame on the stack: ss, rsp and
 * eflags are zeroed, cs is set to __KERNEL_CS, rip to \child_rip,
 * and the orig_rax slot below the frame is zeroed.  NOTE(review):
 * presumably used to start kernel threads as if returning from an
 * exception -- confirm against the entry code that consumes this.
 */
.macro FAKE_STACK_FRAME child_rip
	xorl %eax,%eax			/* %rax = 0 (32-bit write zero-extends) */
	subq $6*8,%rsp
	movq %rax,5*8(%rsp) /* ss */
	movq %rax,4*8(%rsp) /* rsp */
	movq %rax,3*8(%rsp) /* eflags */
	movq $__KERNEL_CS,2*8(%rsp) /* cs */
	movq \child_rip,1*8(%rsp) /* rip */
	movq %rax,(%rsp) /* orig_rax */
.endm
/* Discard the six quadwords pushed by FAKE_STACK_FRAME. */
.macro UNFAKE_STACK_FRAME
	addq $8*6, %rsp
.endm
/*
 * Emit the ICEBP/INT1 instruction (opcode 0xf1) as a raw byte --
 * presumably because the assembler provides no mnemonic for it.
 */
.macro icebp
	.byte 0xf1
.endm
/*
 * With CONFIG_FRAME_POINTER enabled, ENTER/LEAVE expand to the
 * enter/leave instructions so functions maintain an %rbp frame
 * pointer; otherwise they expand to nothing.
 */
#ifdef CONFIG_FRAME_POINTER
#define ENTER enter
#define LEAVE leave
#else
#define ENTER
#define LEAVE
#endif