/*
 * Some macros to handle stack frames in assembly.
 */
/*
 * Byte offsets of the saved registers within the kernel stack frame
 * (the pt_regs layout), measured from the frame base (%rsp after
 * SAVE_ARGS + SAVE_REST).
 */
#define R15		0
#define R14		8
#define R13		16
#define R12		24
#define RBP		32
#define RBX		40

/* arguments: interrupts/non-tracing syscalls only save up to here */
#define R11		48
#define R10		56
#define R9		64
#define R8		72
#define RAX		80
#define RCX		88
#define RDX		96
#define RSI		104
#define RDI		112
#define ORIG_RAX	120       /* + error_code */
/* end of arguments */

/* cpu exception frame or undefined in case of fast syscall. */
#define RIP		128
#define CS		136
#define EFLAGS		144
#define RSP		152
#define SS		160

#define ARGOFFSET	R11       /* offset where the argument area begins */
#define SWFRAME		ORIG_RAX  /* start of the software-saved frame */
1da177e4 | 34 | |
/*
 * SAVE_ARGS: store the C ABI argument/clobber registers onto the stack,
 * building the lower (argument) part of the pt_regs frame.
 *
 *   addskip   - extra bytes to reserve below the saved registers
 *   norcx     - when non-zero, leave the %rcx slot unwritten
 *               (SYSCALL clobbers %rcx, so there is nothing worth saving)
 *   nor891011 - when non-zero, leave the %r8-%r11 slots unwritten
 *
 * Every store is paired with a CFI annotation so the DWARF unwinder can
 * locate the saved registers.
 */
	.macro SAVE_ARGS addskip=0, norcx=0, nor891011=0
	subq	$9*8+\addskip, %rsp
	CFI_ADJUST_CFA_OFFSET	9*8+\addskip
	movq	%rdi, 8*8(%rsp)
	CFI_REL_OFFSET	rdi, 8*8
	movq	%rsi, 7*8(%rsp)
	CFI_REL_OFFSET	rsi, 7*8
	movq	%rdx, 6*8(%rsp)
	CFI_REL_OFFSET	rdx, 6*8
	.if \norcx
	.else
	movq	%rcx, 5*8(%rsp)
	CFI_REL_OFFSET	rcx, 5*8
	.endif
	movq	%rax, 4*8(%rsp)
	CFI_REL_OFFSET	rax, 4*8
	.if \nor891011
	.else
	movq	%r8, 3*8(%rsp)
	CFI_REL_OFFSET	r8, 3*8
	movq	%r9, 2*8(%rsp)
	CFI_REL_OFFSET	r9, 2*8
	movq	%r10, 1*8(%rsp)
	CFI_REL_OFFSET	r10, 1*8
	movq	%r11, (%rsp)
	CFI_REL_OFFSET	r11, 0*8
	.endif
	.endm
63 | ||
#define ARG_SKIP	9*8	/* size of the area reserved by SAVE_ARGS */

/*
 * RESTORE_ARGS: reload the registers stored by SAVE_ARGS and release the
 * frame.  Each skip* parameter, when non-zero, leaves the corresponding
 * register(s) untouched; addskip must match the value that was passed to
 * SAVE_ARGS so the stack pointer is restored correctly.  CFI_RESTORE
 * annotations keep the unwind information in sync with each reload.
 */
	.macro RESTORE_ARGS skiprax=0, addskip=0, skiprcx=0, skipr11=0, \
			    skipr8910=0, skiprdx=0
	.if \skipr11
	.else
	movq	(%rsp), %r11
	CFI_RESTORE r11
	.endif
	.if \skipr8910
	.else
	movq	1*8(%rsp), %r10
	CFI_RESTORE r10
	movq	2*8(%rsp), %r9
	CFI_RESTORE r9
	movq	3*8(%rsp), %r8
	CFI_RESTORE r8
	.endif
	.if \skiprax
	.else
	movq	4*8(%rsp), %rax
	CFI_RESTORE rax
	.endif
	.if \skiprcx
	.else
	movq	5*8(%rsp), %rcx
	CFI_RESTORE rcx
	.endif
	.if \skiprdx
	.else
	movq	6*8(%rsp), %rdx
	CFI_RESTORE rdx
	.endif
	movq	7*8(%rsp), %rsi
	CFI_RESTORE rsi
	movq	8*8(%rsp), %rdi
	CFI_RESTORE rdi
	.if ARG_SKIP+\addskip > 0
	addq	$ARG_SKIP+\addskip, %rsp
	CFI_ADJUST_CFA_OFFSET	-(ARG_SKIP+\addskip)
	.endif
	.endm
1da177e4 | 106 | |
/*
 * LOAD_ARGS: reload the syscall argument registers from the pt_regs
 * frame at \offset(%rsp) — e.g. after a tracer may have modified them.
 *
 * Note the deliberate gap at \offset+32 (the RAX slot): unless skiprax
 * is set, %rax is instead reloaded from \offset+72, which is the
 * ORIG_RAX slot (ORIG_RAX - R11 == 120 - 48 == 72), i.e. the original
 * syscall number rather than the scratch RAX value.
 */
	.macro LOAD_ARGS offset, skiprax=0
	movq	\offset(%rsp), %r11
	movq	\offset+8(%rsp), %r10
	movq	\offset+16(%rsp), %r9
	movq	\offset+24(%rsp), %r8
	movq	\offset+40(%rsp), %rcx
	movq	\offset+48(%rsp), %rdx
	movq	\offset+56(%rsp), %rsi
	movq	\offset+64(%rsp), %rdi
	.if \skiprax
	.else
	movq	\offset+72(%rsp), %rax
	.endif
	.endm
#define REST_SKIP	6*8	/* size of the area reserved by SAVE_REST */

/*
 * SAVE_REST: store the remaining (callee-saved) registers, completing
 * the pt_regs frame whose argument part was built by SAVE_ARGS.  Each
 * store carries a matching CFI annotation for the unwinder.
 */
	.macro SAVE_REST
	subq	$REST_SKIP, %rsp
	CFI_ADJUST_CFA_OFFSET	REST_SKIP
	movq	%rbx, 5*8(%rsp)
	CFI_REL_OFFSET	rbx, 5*8
	movq	%rbp, 4*8(%rsp)
	CFI_REL_OFFSET	rbp, 4*8
	movq	%r12, 3*8(%rsp)
	CFI_REL_OFFSET	r12, 3*8
	movq	%r13, 2*8(%rsp)
	CFI_REL_OFFSET	r13, 2*8
	movq	%r14, 1*8(%rsp)
	CFI_REL_OFFSET	r14, 1*8
	movq	%r15, (%rsp)
	CFI_REL_OFFSET	r15, 0*8
	.endm
/*
 * RESTORE_REST: reload the callee-saved registers stored by SAVE_REST
 * and release that part of the frame, updating the CFI state to match.
 */
	.macro RESTORE_REST
	movq	(%rsp), %r15
	CFI_RESTORE r15
	movq	1*8(%rsp), %r14
	CFI_RESTORE r14
	movq	2*8(%rsp), %r13
	CFI_RESTORE r13
	movq	3*8(%rsp), %r12
	CFI_RESTORE r12
	movq	4*8(%rsp), %rbp
	CFI_RESTORE rbp
	movq	5*8(%rsp), %rbx
	CFI_RESTORE rbx
	addq	$REST_SKIP, %rsp
	CFI_ADJUST_CFA_OFFSET	-(REST_SKIP)
	.endm
0c2bd5a5 | 157 | |
/* SAVE_ALL: build a complete pt_regs frame (arguments + callee-saved). */
	.macro SAVE_ALL
	SAVE_ARGS
	SAVE_REST
	.endm
0c2bd5a5 | 162 | |
/*
 * RESTORE_ALL: tear down a complete pt_regs frame; addskip releases any
 * extra space that was reserved via SAVE_ARGS addskip.
 */
	.macro RESTORE_ALL addskip=0
	RESTORE_REST
	RESTORE_ARGS 0, \addskip
	.endm
167 | ||
168 | .macro icebp | |
169 | .byte 0xf1 | |
170 | .endm |